##// END OF EJS Templates
Changeset: global: mass rewrite to use modern octal syntax (e.g. 0111 -> 0o111)
Author: Gregory Szorc
Revision: r25658:e9303674, branch: default
Navigation: parent / child · Browse files · Show more

Note: the requested diff is too large, so its content was truncated; use
"Show full diff" to view the complete change.
@@ -1,208 +1,208
1 1 # darcs.py - darcs support for the convert extension
2 2 #
3 3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from common import NoRepo, checktool, commandline, commit, converter_source
9 9 from mercurial.i18n import _
10 10 from mercurial import util
11 11 import os, shutil, tempfile, re, errno
12 12
13 13 # The naming drift of ElementTree is fun!
14 14
15 15 try:
16 16 from xml.etree.cElementTree import ElementTree, XMLParser
17 17 except ImportError:
18 18 try:
19 19 from xml.etree.ElementTree import ElementTree, XMLParser
20 20 except ImportError:
21 21 try:
22 22 from elementtree.cElementTree import ElementTree, XMLParser
23 23 except ImportError:
24 24 try:
25 25 from elementtree.ElementTree import ElementTree, XMLParser
26 26 except ImportError:
27 27 pass
28 28
29 29 class darcs_source(converter_source, commandline):
30 30 def __init__(self, ui, path, rev=None):
31 31 converter_source.__init__(self, ui, path, rev=rev)
32 32 commandline.__init__(self, ui, 'darcs')
33 33
34 34 # check for _darcs, ElementTree so that we can easily skip
35 35 # test-convert-darcs if ElementTree is not around
36 36 if not os.path.exists(os.path.join(path, '_darcs')):
37 37 raise NoRepo(_("%s does not look like a darcs repository") % path)
38 38
39 39 checktool('darcs')
40 40 version = self.run0('--version').splitlines()[0].strip()
41 41 if version < '2.1':
42 42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
43 43 version)
44 44
45 45 if "ElementTree" not in globals():
46 46 raise util.Abort(_("Python ElementTree module is not available"))
47 47
48 48 self.path = os.path.realpath(path)
49 49
50 50 self.lastrev = None
51 51 self.changes = {}
52 52 self.parents = {}
53 53 self.tags = {}
54 54
55 55 # Check darcs repository format
56 56 format = self.format()
57 57 if format:
58 58 if format in ('darcs-1.0', 'hashed'):
59 59 raise NoRepo(_("%s repository format is unsupported, "
60 60 "please upgrade") % format)
61 61 else:
62 62 self.ui.warn(_('failed to detect repository format!'))
63 63
64 64 def before(self):
65 65 self.tmppath = tempfile.mkdtemp(
66 66 prefix='convert-' + os.path.basename(self.path) + '-')
67 67 output, status = self.run('init', repodir=self.tmppath)
68 68 self.checkexit(status)
69 69
70 70 tree = self.xml('changes', xml_output=True, summary=True,
71 71 repodir=self.path)
72 72 tagname = None
73 73 child = None
74 74 for elt in tree.findall('patch'):
75 75 node = elt.get('hash')
76 76 name = elt.findtext('name', '')
77 77 if name.startswith('TAG '):
78 78 tagname = name[4:].strip()
79 79 elif tagname is not None:
80 80 self.tags[tagname] = node
81 81 tagname = None
82 82 self.changes[node] = elt
83 83 self.parents[child] = [node]
84 84 child = node
85 85 self.parents[child] = []
86 86
87 87 def after(self):
88 88 self.ui.debug('cleaning up %s\n' % self.tmppath)
89 89 shutil.rmtree(self.tmppath, ignore_errors=True)
90 90
91 91 def recode(self, s, encoding=None):
92 92 if isinstance(s, unicode):
93 93 # XMLParser returns unicode objects for anything it can't
94 94 # encode into ASCII. We convert them back to str to get
95 95 # recode's normal conversion behavior.
96 96 s = s.encode('latin-1')
97 97 return super(darcs_source, self).recode(s, encoding)
98 98
99 99 def xml(self, cmd, **kwargs):
100 100 # NOTE: darcs is currently encoding agnostic and will print
101 101 # patch metadata byte-for-byte, even in the XML changelog.
102 102 etree = ElementTree()
103 103 # While we are decoding the XML as latin-1 to be as liberal as
104 104 # possible, etree will still raise an exception if any
105 105 # non-printable characters are in the XML changelog.
106 106 parser = XMLParser(encoding='latin-1')
107 107 p = self._run(cmd, **kwargs)
108 108 etree.parse(p.stdout, parser=parser)
109 109 p.wait()
110 110 self.checkexit(p.returncode)
111 111 return etree.getroot()
112 112
113 113 def format(self):
114 114 output, status = self.run('show', 'repo', no_files=True,
115 115 repodir=self.path)
116 116 self.checkexit(status)
117 117 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
118 118 if not m:
119 119 return None
120 120 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
121 121
122 122 def manifest(self):
123 123 man = []
124 124 output, status = self.run('show', 'files', no_directories=True,
125 125 repodir=self.tmppath)
126 126 self.checkexit(status)
127 127 for line in output.split('\n'):
128 128 path = line[2:]
129 129 if path:
130 130 man.append(path)
131 131 return man
132 132
133 133 def getheads(self):
134 134 return self.parents[None]
135 135
136 136 def getcommit(self, rev):
137 137 elt = self.changes[rev]
138 138 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
139 139 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
140 140 # etree can return unicode objects for name, comment, and author,
141 141 # so recode() is used to ensure str objects are emitted.
142 142 return commit(author=self.recode(elt.get('author')),
143 143 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
144 144 desc=self.recode(desc).strip(),
145 145 parents=self.parents[rev])
146 146
147 147 def pull(self, rev):
148 148 output, status = self.run('pull', self.path, all=True,
149 149 match='hash %s' % rev,
150 150 no_test=True, no_posthook=True,
151 151 external_merge='/bin/false',
152 152 repodir=self.tmppath)
153 153 if status:
154 154 if output.find('We have conflicts in') == -1:
155 155 self.checkexit(status, output)
156 156 output, status = self.run('revert', all=True, repodir=self.tmppath)
157 157 self.checkexit(status, output)
158 158
159 159 def getchanges(self, rev, full):
160 160 if full:
161 161 raise util.Abort(_("convert from darcs do not support --full"))
162 162 copies = {}
163 163 changes = []
164 164 man = None
165 165 for elt in self.changes[rev].find('summary').getchildren():
166 166 if elt.tag in ('add_directory', 'remove_directory'):
167 167 continue
168 168 if elt.tag == 'move':
169 169 if man is None:
170 170 man = self.manifest()
171 171 source, dest = elt.get('from'), elt.get('to')
172 172 if source in man:
173 173 # File move
174 174 changes.append((source, rev))
175 175 changes.append((dest, rev))
176 176 copies[dest] = source
177 177 else:
178 178 # Directory move, deduce file moves from manifest
179 179 source = source + '/'
180 180 for f in man:
181 181 if not f.startswith(source):
182 182 continue
183 183 fdest = dest + '/' + f[len(source):]
184 184 changes.append((f, rev))
185 185 changes.append((fdest, rev))
186 186 copies[fdest] = f
187 187 else:
188 188 changes.append((elt.text.strip(), rev))
189 189 self.pull(rev)
190 190 self.lastrev = rev
191 191 return sorted(changes), copies, set()
192 192
193 193 def getfile(self, name, rev):
194 194 if rev != self.lastrev:
195 195 raise util.Abort(_('internal calling inconsistency'))
196 196 path = os.path.join(self.tmppath, name)
197 197 try:
198 198 data = util.readfile(path)
199 199 mode = os.lstat(path).st_mode
200 200 except IOError, inst:
201 201 if inst.errno == errno.ENOENT:
202 202 return None, None
203 203 raise
204 mode = (mode & 0111) and 'x' or ''
204 mode = (mode & 0o111) and 'x' or ''
205 205 return data, mode
206 206
207 207 def gettags(self):
208 208 return self.tags
@@ -1,342 +1,342
1 1 # gnuarch.py - GNU Arch support for the convert extension
2 2 #
3 3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 4 # and others
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from common import NoRepo, commandline, commit, converter_source
10 10 from mercurial.i18n import _
11 11 from mercurial import encoding, util
12 12 import os, shutil, tempfile, stat
13 13 from email.Parser import Parser
14 14
class gnuarch_source(converter_source, commandline):
    """Convert source for GNU Arch repositories.

    Requires either the 'baz' or 'tla' command line client. Revisions
    are replayed into a temporary working tree and the resulting
    changesets are parsed from the tool's output.
    """

    class gnuarch_rev(object):
        """Per-revision metadata collected while scanning the tree."""
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''
            self.date = None
            self.author = ''
            self.continuationof = None  # revision this one continues, if any
            self.add_files = []
            self.mod_files = []
            self.del_files = []
            self.ren_files = {}         # renamed files: src -> dst
            self.ren_dirs = {}          # renamed directories: src -> dst

    def __init__(self, ui, path, rev=None):
        super(gnuarch_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise NoRepo(_("%s does not look like a GNU Arch repository")
                         % path)

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.findexe('baz'):
            self.execmd = 'baz'
        else:
            if util.findexe('tla'):
                self.execmd = 'tla'
            else:
                raise util.Abort(_('cannot find a GNU Arch tool'))

        commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None

        self.treeversion = None
        self.lastrev = None
        self.changes = {}   # revision -> gnuarch_rev
        self.parents = {}   # revision -> list of parent revisions
        self.tags = {}
        self.catlogparser = Parser()
        self.encoding = encoding.encoding
        self.archives = []

    def before(self):
        """Scan all reachable tree versions and build the revision graph."""
        # Get registered archives
        self.archives = [i.rstrip('\n')
                         for i in self.runlines0('archives', '-n')]

        if self.execmd == 'tla':
            output = self.run0('tree-version', self.path)
        else:
            output = self.run0('tree-version', '-d', self.path)
        self.treeversion = output.strip()

        # Get name of temporary directory
        version = self.treeversion.split('/')
        self.tmppath = os.path.join(tempfile.gettempdir(),
                                    'hg-%s' % version[1])

        # Generate parents dictionary
        self.parents[None] = []
        treeversion = self.treeversion
        child = None
        while treeversion:
            self.ui.status(_('analyzing tree version %s...\n') % treeversion)

            archive = treeversion.split('/')[0]
            if archive not in self.archives:
                self.ui.status(_('tree analysis stopped because it points to '
                                 'an unregistered archive %s...\n') % archive)
                break

            # Get the complete list of revisions for that tree version
            output, status = self.runlines('revisions', '-r', '-f', treeversion)
            self.checkexit(status, 'failed retrieving revisions for %s'
                           % treeversion)

            # No new iteration unless a revision has a continuation-of header
            treeversion = None

            for l in output:
                rev = l.strip()
                self.changes[rev] = self.gnuarch_rev(rev)
                self.parents[rev] = []

                # Read author, date and summary
                catlog, status = self.run('cat-log', '-d', self.path, rev)
                if status:
                    catlog = self.run0('cat-archive-log', rev)
                self._parsecatlog(catlog, rev)

                # Populate the parents map
                self.parents[child].append(rev)

                # Keep track of the current revision as the child of the next
                # revision scanned
                child = rev

                # Check if we have to follow the usual incremental history
                # or if we have to 'jump' to a different treeversion given
                # by the continuation-of header.
                if self.changes[rev].continuationof:
                    treeversion = '--'.join(
                        self.changes[rev].continuationof.split('--')[:-1])
                    break

                # If we reached a base-0 revision w/o any continuation-of
                # header, it means the tree history ends here.
                if rev[-6:] == 'base-0':
                    break

    def after(self):
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def getheads(self):
        return self.parents[None]

    def getfile(self, name, rev):
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))

        if not os.path.lexists(os.path.join(self.tmppath, name)):
            # file was deleted in this revision
            return None, None

        return self._getfile(name, rev)

    def getchanges(self, rev, full):
        if full:
            raise util.Abort(_("convert from arch do not support --full"))
        self._update(rev)
        changes = []
        copies = {}

        for f in self.changes[rev].add_files:
            changes.append((f, rev))

        for f in self.changes[rev].mod_files:
            changes.append((f, rev))

        for f in self.changes[rev].del_files:
            changes.append((f, rev))

        for src in self.changes[rev].ren_files:
            to = self.changes[rev].ren_files[src]
            changes.append((src, rev))
            changes.append((to, rev))
            copies[to] = src

        for src in self.changes[rev].ren_dirs:
            to = self.changes[rev].ren_dirs[src]
            chgs, cps = self._rendirchanges(src, to)
            changes += [(f, rev) for f in chgs]
            copies.update(cps)

        self.lastrev = rev
        return sorted(set(changes)), copies, set()

    def getcommit(self, rev):
        changes = self.changes[rev]
        return commit(author=changes.author, date=changes.date,
                      desc=changes.summary, parents=self.parents[rev], rev=rev)

    def gettags(self):
        return self.tags

    def _execute(self, cmd, *args, **kwargs):
        """Run an arch command discarding its output; return the exit code."""
        cmdline = [self.execmd, cmd]
        cmdline += args
        cmdline = [util.shellquote(arg) for arg in cmdline]
        cmdline += ['>', os.devnull, '2>', os.devnull]
        cmdline = util.quotecommand(' '.join(cmdline))
        self.ui.debug(cmdline, '\n')
        return os.system(cmdline)

    def _update(self, rev):
        self.ui.debug('applying revision %s...\n' % rev)
        changeset, status = self.runlines('replay', '-d', self.tmppath,
                                          rev)
        if status:
            # Something went wrong while merging (baz or tla
            # issue?), get latest revision and try from there
            shutil.rmtree(self.tmppath, ignore_errors=True)
            self._obtainrevision(rev)
        else:
            old_rev = self.parents[rev][0]
            self.ui.debug('computing changeset between %s and %s...\n'
                          % (old_rev, rev))
            self._parsechangeset(changeset, rev)

    def _getfile(self, name, rev):
        """Return (data, mode) for a file present in the working tree."""
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        if stat.S_ISLNK(mode):
            data = os.readlink(os.path.join(self.tmppath, name))
            if mode:
                mode = 'l'
            else:
                mode = ''
        else:
            data = open(os.path.join(self.tmppath, name), 'rb').read()
            # modern octal literal: 0o111 tests any execute bit
            mode = (mode & 0o111) and 'x' or ''
        return data, mode

    def _exclude(self, name):
        """Return True for internal GNU Arch control paths."""
        exclude = ['{arch}', '.arch-ids', '.arch-inventory']
        for exc in exclude:
            if name.find(exc) != -1:
                return True
        return False

    def _readcontents(self, path):
        files = []
        contents = os.listdir(path)
        while len(contents) > 0:
            c = contents.pop()
            p = os.path.join(path, c)
            # os.walk could be used, but here we avoid internal GNU
            # Arch files and directories, thus saving a lot time.
            if not self._exclude(p):
                if os.path.isdir(p):
                    contents += [os.path.join(c, f) for f in os.listdir(p)]
                else:
                    files.append(c)
        return files

    def _rendirchanges(self, src, dest):
        """Expand a directory rename into per-file changes and copies."""
        changes = []
        copies = {}
        files = self._readcontents(os.path.join(self.tmppath, dest))
        for f in files:
            s = os.path.join(src, f)
            d = os.path.join(dest, f)
            changes.append(s)
            changes.append(d)
            copies[d] = s
        return changes, copies

    def _obtainrevision(self, rev):
        self.ui.debug('obtaining revision %s...\n' % rev)
        output = self._execute('get', rev, self.tmppath)
        self.checkexit(output)
        self.ui.debug('analyzing revision %s...\n' % rev)
        files = self._readcontents(self.tmppath)
        self.changes[rev].add_files += files

    def _stripbasepath(self, path):
        if path.startswith('./'):
            return path[2:]
        return path

    def _parsecatlog(self, data, rev):
        """Extract date, author, summary and continuation-of from a
        cat-log message."""
        try:
            catlog = self.catlogparser.parsestr(data)

            # Commit date
            self.changes[rev].date = util.datestr(
                util.strdate(catlog['Standard-date'],
                             '%Y-%m-%d %H:%M:%S'))

            # Commit author
            self.changes[rev].author = self.recode(catlog['Creator'])

            # Commit description
            self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
                                                     catlog.get_payload()))
            self.changes[rev].summary = self.recode(self.changes[rev].summary)

            # Commit revision origin when dealing with a branch or tag
            if 'Continuation-of' in catlog:
                self.changes[rev].continuationof = self.recode(
                    catlog['Continuation-of'])
        except Exception:
            raise util.Abort(_('could not parse cat-log of %s') % rev)

    def _parsechangeset(self, data, rev):
        """Classify each line of a 'replay' changeset into the per-
        revision add/mod/del/rename lists."""
        for l in data:
            l = l.strip()
            # Added file (ignore added directory)
            if l.startswith('A') and not l.startswith('A/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].add_files.append(file)
            # Deleted file (ignore deleted directory)
            elif l.startswith('D') and not l.startswith('D/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].del_files.append(file)
            # Modified binary file
            elif l.startswith('Mb'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified link
            elif l.startswith('M->'):
                file = self._stripbasepath(l[3:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified file
            elif l.startswith('M'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed file (or link)
            elif l.startswith('=>'):
                files = l[2:].strip().split(' ')
                if len(files) == 1:
                    files = l[2:].strip().split('\t')
                src = self._stripbasepath(files[0])
                dst = self._stripbasepath(files[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_files[src] = dst
            # Conversion from file to link or from link to file (modified)
            elif l.startswith('ch'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed directory
            elif l.startswith('/>'):
                dirs = l[2:].strip().split(' ')
                if len(dirs) == 1:
                    dirs = l[2:].strip().split('\t')
                src = self._stripbasepath(dirs[0])
                dst = self._stripbasepath(dirs[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_dirs[src] = dst
@@ -1,1385 +1,1385
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10
11 11 import os
12 12 import copy
13 13
14 14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 15 archival, pathutil, revset
16 16 from mercurial.i18n import _
17 17
18 18 import lfutil
19 19 import lfcommands
20 20 import basestore
21 21
22 22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23 23
def composelargefilematcher(match, manifest):
    '''create a matcher that matches only the largefiles in the original
    matcher'''
    lfmatcher = copy.copy(match)
    islfile = lambda f: lfutil.standin(f) in manifest
    lfmatcher._files = filter(islfile, lfmatcher._files)
    lfmatcher._fileroots = set(lfmatcher._files)
    lfmatcher._always = False
    basematchfn = lfmatcher.matchfn
    # a file matches only if it is a largefile AND the base matcher agrees
    lfmatcher.matchfn = lambda f: islfile(f) and basematchfn(f)
    return lfmatcher
35 35
def composenormalfilematcher(match, manifest, exclude=None):
    '''create a matcher that matches only the normal (non-large) files in
    the original matcher; 'exclude' optionally names extra files to drop'''
    excluded = set()
    if exclude is not None:
        excluded.update(exclude)

    normal = copy.copy(match)
    isnormal = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
                              manifest or f in excluded)
    normal._files = filter(isnormal, normal._files)
    normal._fileroots = set(normal._files)
    normal._always = False
    basematchfn = normal.matchfn
    normal.matchfn = lambda f: isnormal(f) and basematchfn(f)
    return normal
50 50
def installnormalfilesmatchfn(manifest):
    '''installmatchfn with a matchfn that ignores all largefiles'''
    def overridematch(ctx, pats=[], opts={}, globbed=False,
                      default='relpath', badfn=None):
        # build the original matcher, then strip the largefiles from it
        match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
        return composenormalfilematcher(match, manifest)
    # oldmatch is bound after the def; the closure reads it at call time
    oldmatch = installmatchfn(overridematch)
58 58
def installmatchfn(f):
    '''monkey patch the scmutil module with a custom match function.
    Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
    previous = scmutil.match
    # remember the replaced function so restorematchfn() can undo this
    f.oldmatch = previous
    scmutil.match = f
    return previous
66 66
def restorematchfn():
    '''restores scmutil.match to what it was before installmatchfn
    was called. no-op if scmutil.match is its original function.

    Note that n calls to installmatchfn will require n calls to
    restore the original matchfn.'''
    # Supply the current function as the getattr default so this really
    # is a no-op (instead of raising AttributeError) when no override is
    # installed, matching restorematchandpatsfn() below.
    scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
74 74
def installmatchandpatsfn(f):
    '''monkey patch scmutil.matchandpats with a custom function; returns
    the replaced function so it can be restored later'''
    previous = scmutil.matchandpats
    f.oldmatchandpats = previous
    scmutil.matchandpats = f
    return previous
80 80
def restorematchandpatsfn():
    '''restores scmutil.matchandpats to what it was before
    installmatchandpatsfn was called. No-op if scmutil.matchandpats
    is its original function.

    Note that n calls to installmatchandpatsfn will require n calls
    to restore the original matchfn.'''
    # default to the current function: harmless when nothing is installed
    scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
                                   scmutil.matchandpats)
90 90
def addlargefiles(ui, repo, isaddremove, matcher, **opts):
    '''walk the working directory and mark qualifying files as largefiles;
    returns (added, bad) lists of file names'''
    large = opts.get('large')
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))

    # optional pattern-based matcher from the largefiles.patterns config
    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
        if lfpats:
            lfmatcher = match_.match(repo.root, '', list(lfpats))

    lfnames = []
    m = matcher

    wctx = repo[None]
    for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # addremove in core gets fancy with the name, add doesn't
        if isaddremove:
            name = m.uipath(f)
        else:
            name = m.rel(f)

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_('%s already a largefile\n') % name)
            continue

        if (exact or not exists) and not lfutil.isstandin(f):
            # In case the file was removed previously, but not committed
            # (issue3507)
            if not repo.wvfs.exists(f):
                continue

            # large enough, explicitly requested, or pattern-matched?
            abovemin = (lfsize and
                        repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_('adding %s as a largefile\n') % name)

    bad = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    wlock = repo.wlock()
    try:
        if not opts.get('dry_run'):
            standins = []
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                standinname = lfutil.standin(f)
                lfutil.writestandin(repo, standinname, hash='',
                    executable=lfutil.getexecutable(repo.wjoin(f)))
                standins.append(standinname)
                if lfdirstate[f] == 'r':
                    lfdirstate.normallookup(f)
                else:
                    lfdirstate.add(f)
            lfdirstate.write()
            bad += [lfutil.splitstandin(f)
                    for f in repo[None].add(standins)
                    if f in m.files()]

        added = [f for f in lfnames if f not in bad]
    finally:
        wlock.release()
    return added, bad
165 165
def removelargefiles(ui, repo, isaddremove, matcher, **opts):
    '''remove the largefiles selected by the matcher; returns a nonzero
    int if any file could not be removed'''
    after = opts.get('after')
    m = composelargefilematcher(matcher, repo[None].manifest())
    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=not isaddremove)
    finally:
        repo.lfstatus = False
    manifest = repo[None].manifest()
    # keep only the entries that really are largefiles
    modified, added, deleted, clean = [[f for f in list
                                       if lfutil.standin(f) in manifest]
                                      for list in (s.modified, s.added,
                                                   s.deleted, s.clean)]

    def warn(files, msg):
        for f in files:
            ui.warn(msg % m.rel(f))
        return int(len(files) > 0)

    result = 0

    if after:
        remove = deleted
        result = warn(modified + added + clean,
                      _('not removing %s: file still exists\n'))
    else:
        remove = deleted + clean
        result = warn(modified, _('not removing %s: file is modified (use -f'
                                  ' to force removal)\n'))
        result = warn(added, _('not removing %s: file has been marked for add'
                               ' (use forget to undo)\n')) or result

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in sorted(remove):
            if ui.verbose or not m.exact(f):
                # addremove in core gets fancy with the name, remove doesn't
                if isaddremove:
                    name = m.uipath(f)
                else:
                    name = m.rel(f)
                ui.status(_('removing %s\n') % name)

            if not opts.get('dry_run'):
                if not after:
                    util.unlinkpath(repo.wjoin(f), ignoremissing=True)

        if opts.get('dry_run'):
            return result

        remove = [lfutil.standin(f) for f in remove]
        # If this is being called by addremove, let the original addremove
        # function handle this.
        if not isaddremove:
            for f in remove:
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(remove)

        for f in remove:
            lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
                                  False)

        lfdirstate.write()
    finally:
        wlock.release()

    return result
236 236
237 237 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 238 # appear at their right place in the manifests.
def decodepath(orig, path):
    '''map a standin path back to its largefile name (hgweb display)'''
    return lfutil.splitstandin(path) or path
241 241
242 242 # -- Wrappers: modify existing commands --------------------------------
243 243
def overrideadd(orig, ui, repo, *pats, **opts):
    '''wrapper for 'hg add': --normal and --large are mutually exclusive'''
    if opts.get('normal') and opts.get('large'):
        raise util.Abort(_('--normal cannot be used with --large'))
    return orig(ui, repo, *pats, **opts)
248 248
def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
    '''add largefiles first, then let the original add handle the rest'''
    # The --normal flag short circuits this override
    if opts.get('normal'):
        return orig(ui, repo, matcher, prefix, explicitonly, **opts)

    ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
                                             ladded)
    bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)

    bad.extend(lbad)
    return bad
261 261
def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
    '''remove normal files via the original command, then largefiles'''
    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
    result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
    return removelargefiles(ui, repo, False, matcher, after=after,
                            force=force) or result
267 267
def overridestatusfn(orig, repo, rev2, **opts):
    '''run the subrepo status with largefile awareness enabled'''
    try:
        repo._repo.lfstatus = True
        return orig(repo, rev2, **opts)
    finally:
        # always reset the flag, even if orig raised
        repo._repo.lfstatus = False
274 274
def overridestatus(orig, ui, repo, *pats, **opts):
    '''run 'hg status' with largefile awareness enabled'''
    try:
        repo.lfstatus = True
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
281 281
def overridedirty(orig, repo, ignoreupdate=False):
    '''run the subrepo dirty check with largefile awareness enabled'''
    try:
        repo._repo.lfstatus = True
        return orig(repo, ignoreupdate)
    finally:
        repo._repo.lfstatus = False
288 288
def overridelog(orig, ui, repo, *pats, **opts):
    '''wrapper for 'hg log' that makes patterns also match the standins'''
    def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
            default='relpath', badfn=None):
        """Matcher that merges root directory with .hglf, suitable for log.
        It is still possible to match .hglf directly.
        For any listed files run log on the standin too.
        matchfn tries both the given filename and with .hglf stripped.
        """
        matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
                                       badfn=badfn)
        m, p = copy.copy(matchandpats)

        if m.always():
            # We want to match everything anyway, so there's no benefit trying
            # to add standins.
            return matchandpats

        pats = set(p)

        def fixpats(pat, tostandin=lfutil.standin):
            # filesets can't be mangled; pass them through untouched
            if pat.startswith('set:'):
                return pat

            kindpat = match_._patsplit(pat, None)

            if kindpat[0] is not None:
                return kindpat[0] + ':' + tostandin(kindpat[1])
            return tostandin(kindpat[1])

        if m._cwd:
            hglf = lfutil.shortname
            back = util.pconvert(m.rel(hglf)[:-len(hglf)])

            def tostandin(f):
                # The file may already be a standin, so truncate the back
                # prefix and test before mangling it. This avoids turning
                # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
                if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
                    return f

                # An absolute path is from outside the repo, so truncate the
                # path to the root before building the standin. Otherwise cwd
                # is somewhere in the repo, relative to root, and needs to be
                # prepended before building the standin.
                if os.path.isabs(m._cwd):
                    f = f[len(back):]
                else:
                    f = m._cwd + '/' + f
                return back + lfutil.standin(f)

            pats.update(fixpats(f, tostandin) for f in p)
        else:
            def tostandin(f):
                if lfutil.splitstandin(f):
                    return f
                return lfutil.standin(f)
            pats.update(fixpats(f, tostandin) for f in p)

        # NB: the loop below appends to m._files while indexing it, so the
        # iteration bound must be the *original* length -- keep range().
        for i in range(0, len(m._files)):
            # Don't add '.hglf' to m.files, since that is already covered by '.'
            if m._files[i] == '.':
                continue
            standin = lfutil.standin(m._files[i])
            # If the "standin" is a directory, append instead of replace to
            # support naming a directory on the command line with only
            # largefiles. The original directory is kept to support normal
            # files.
            if standin in repo[ctx.node()]:
                m._files[i] = standin
            elif m._files[i] not in repo[ctx.node()] \
                    and repo.wvfs.isdir(standin):
                m._files.append(standin)

        m._fileroots = set(m._files)
        m._always = False
        origmatchfn = m.matchfn
        def lfmatchfn(f):
            # try the largefile name first, then the path as given
            lf = lfutil.splitstandin(f)
            if lf is not None and origmatchfn(lf):
                return True
            r = origmatchfn(f)
            return r
        m.matchfn = lfmatchfn

        ui.debug('updated patterns: %s\n' % sorted(pats))
        return m, pats

    # For hg log --patch, the match object is used in two different senses:
    # (1) to determine what revisions should be printed out, and
    # (2) to determine what files to print out diffs for.
    # The magic matchandpats override should be used for case (1) but not for
    # case (2).
    def overridemakelogfilematcher(repo, pats, opts, badfn=None):
        wctx = repo[None]
        match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
        return lambda rev: match

    oldmatchandpats = installmatchandpatsfn(overridematchandpats)
    oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
    setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)

    try:
        return orig(ui, repo, *pats, **opts)
    finally:
        restorematchandpatsfn()
394 394 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
395 395
def overrideverify(orig, ui, repo, *pats, **opts):
    """Wrap verify: when any largefile flag was given, also run
    lfcommands.verifylfiles after the normal verification succeeds."""
    checklarge = opts.pop('large', False)
    checkall = opts.pop('lfa', False)
    checkcontents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if checklarge or checkall or checkcontents:
        if not result:
            result = lfcommands.verifylfiles(ui, repo, checkall,
                                             checkcontents)
    return result
405 405
def overridedebugstate(orig, ui, repo, *pats, **opts):
    """Wrap debugstate: with --large, show the largefiles dirstate
    instead of the normal one."""
    uselarge = opts.pop('large', False)
    if not uselarge:
        orig(ui, repo, *pats, **opts)
        return
    class fakerepo(object):
        # expose the largefiles dirstate under the attribute debugstate reads
        dirstate = lfutil.openlfdirstate(ui, repo)
    orig(ui, fakerepo, *pats, **opts)
414 414
415 415 # Before starting the manifest merge, merge.updates will call
416 416 # _checkunknownfile to check if there are any files in the merged-in
417 417 # changeset that collide with unknown files in the working copy.
418 418 #
419 419 # The largefiles are seen as unknown, so this prevents us from merging
420 420 # in a file 'foo' if we already have a largefile with the same name.
421 421 #
422 422 # The overridden function filters the unknown files by removing any
423 423 # largefiles. This makes the merge proceed and we can then handle this
424 424 # case further in the overridden calculateupdates function below.
def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
    """Skip the unknown-file collision check for tracked largefiles."""
    normalized = repo.dirstate.normalize(f)
    if lfutil.standin(normalized) in wctx:
        # The "unknown" file is really a largefile we already track.
        return False
    return origfn(repo, wctx, mctx, f, f2)
429 429
430 430 # The manifest merge handles conflicts on the manifest level. We want
431 431 # to handle changes in largefile-ness of files at this level too.
432 432 #
433 433 # The strategy is to run the original calculateupdates and then process
434 434 # the action list it outputs. There are two cases we need to deal with:
435 435 #
436 436 # 1. Normal file in p1, largefile in p2. Here the largefile is
437 437 # detected via its standin file, which will enter the working copy
438 438 # with a "get" action. It is not "merge" since the standin is all
439 439 # Mercurial is concerned with at this level -- the link to the
440 440 # existing normal file is not relevant here.
441 441 #
442 442 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
443 443 # since the largefile will be present in the working copy and
444 444 # different from the normal file in p2. Mercurial therefore
445 445 # triggers a merge action.
446 446 #
447 447 # In both cases, we prompt the user and emit new actions to either
448 448 # remove the standin (if the normal file was kept) or to remove the
449 449 # normal file and get the standin (if the largefile was kept). The
450 450 # default prompt answer is to use the largefile version since it was
451 451 # presumably changed on purpose.
452 452 #
453 453 # Finally, the merge.applyupdates function will then take care of
454 454 # writing the files into the working copy and lfcommands.updatelfiles
455 455 # will update the largefiles.
def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
                             partial, acceptremote, followcopies):
    """Wrap merge.calculateupdates to resolve largefile/normal-file
    conflicts, prompting the user (see the strategy comment above).

    Same interface as origfn: returns (actions, diverge, renamedelete).
    """
    overwrite = force and not branchmerge
    actions, diverge, renamedelete = origfn(
        repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
        followcopies)

    if overwrite:
        # Forced overwrite: no conflict resolution needed.
        return actions, diverge, renamedelete

    # Convert to dictionary with filename as key and action as value.
    lfiles = set()
    for f in actions:
        splitstandin = f and lfutil.splitstandin(f)
        if splitstandin in p1:
            lfiles.add(splitstandin)
        elif lfutil.standin(f) in p1:
            lfiles.add(f)

    for lfile in lfiles:
        standin = lfutil.standin(lfile)
        # Look up the pending action for both the largefile and its standin.
        (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
        (sm, sargs, smsg) = actions.get(standin, (None, None, None))
        if sm in ('g', 'dc') and lm != 'r':
            # Case 1: normal file in the working copy, largefile in
            # the second parent
            usermsg = _('remote turned local normal file %s into a largefile\n'
                        'use (l)argefile or keep (n)ormal file?'
                        '$$ &Largefile $$ &Normal file') % lfile
            if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
                actions[lfile] = ('r', None, 'replaced by standin')
                actions[standin] = ('g', sargs, 'replaces standin')
            else: # keep local normal file
                actions[lfile] = ('k', None, 'replaces standin')
                if branchmerge:
                    actions[standin] = ('k', None, 'replaced by non-standin')
                else:
                    actions[standin] = ('r', None, 'replaced by non-standin')
        elif lm in ('g', 'dc') and sm != 'r':
            # Case 2: largefile in the working copy, normal file in
            # the second parent
            usermsg = _('remote turned local largefile %s into a normal file\n'
                        'keep (l)argefile or use (n)ormal file?'
                        '$$ &Largefile $$ &Normal file') % lfile
            if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
                if branchmerge:
                    # largefile can be restored from standin safely
                    actions[lfile] = ('k', None, 'replaced by standin')
                    actions[standin] = ('k', None, 'replaces standin')
                else:
                    # "lfile" should be marked as "removed" without
                    # removal of itself
                    actions[lfile] = ('lfmr', None,
                                      'forget non-standin largefile')

                    # linear-merge should treat this largefile as 're-added'
                    actions[standin] = ('a', None, 'keep standin')
            else: # pick remote normal file
                actions[lfile] = ('g', largs, 'replaces standin')
                actions[standin] = ('r', None, 'replaced by non-standin')

    return actions, diverge, renamedelete
518 518
def mergerecordupdates(orig, repo, actions, branchmerge):
    """Wrap merge.recordupdates to process the largefiles-only 'lfmr'
    (mark-removed) actions emitted by overridecalculateupdates before
    the normal actions run."""
    if 'lfmr' in actions:
        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
        for lfile, args, msg in actions['lfmr']:
            # this should be executed before 'orig', to execute 'remove'
            # before all other actions
            repo.dirstate.remove(lfile)
            # make sure lfile doesn't get synclfdirstate'd as normal
            lfdirstate.add(lfile)
        lfdirstate.write()

    return orig(repo, actions, branchmerge)
531 531
532 532
# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits without prompting the user.
def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
    """Merge largefile standins, prompting only on real conflicts.

    If the other side is unchanged, or both sides made the same change,
    resolve without prompting; if only our side is unchanged, take the
    other side. Non-standins fall through to origfn. Returns 0.
    """
    if not lfutil.isstandin(orig):
        return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)

    # Standin contents are the largefile hashes (per the variable names).
    ahash = fca.data().strip().lower()
    dhash = fcd.data().strip().lower()
    ohash = fco.data().strip().lower()
    if (ohash != ahash and
        ohash != dhash and
        (dhash == ahash or
         repo.ui.promptchoice(
             _('largefile %s has a merge conflict\nancestor was %s\n'
               'keep (l)ocal %s or\ntake (o)ther %s?'
               '$$ &Local $$ &Other') %
               (lfutil.splitstandin(orig), ahash, dhash, ohash),
             0) == 1)):
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return 0
553 553
def copiespathcopies(orig, ctx1, ctx2, match=None):
    """Wrap copies.pathcopies, mapping standin names in both keys and
    values back to their largefile names."""
    rawcopies = orig(ctx1, ctx2, match=match)
    result = {}
    for dst, src in rawcopies.iteritems():
        realdst = lfutil.splitstandin(dst) or dst
        realsrc = lfutil.splitstandin(src) or src
        result[realdst] = realsrc
    return result
562 562
# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    """Wrap copy/rename so largefiles are copied or renamed along with
    their standins (see the comment above for the overall approach)."""
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    installnormalfilesmatchfn(repo[None].manifest())
    try:
        result = orig(ui, repo, pats, opts, rename)
    except util.Abort, e:
        # "no files to copy" just means this pass had nothing to do;
        # remember that and continue with the largefiles pass.
        if str(e) != _('no files to copy'):
            raise e
        else:
            nonormalfiles = True
            result = 0
    finally:
        restorematchfn()

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    def makestandin(relpath):
        # Absolute working-directory path of the standin for relpath.
        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
        return os.path.join(repo.wjoin(lfutil.standin(path)))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))

    try:
        # When we call orig below it creates the standins but we don't add
        # them to the dir state until later so lock during that time.
        wlock = repo.wlock()

        manifest = repo[None].manifest()
        def overridematch(ctx, pats=[], opts={}, globbed=False,
                default='relpath', badfn=None):
            newpats = []
            # The patterns were previously mangled to add the standin
            # directory; we need to remove that now
            for pat in pats:
                if match_.patkind(pat) is None and lfutil.shortname in pat:
                    newpats.append(pat.replace(lfutil.shortname, ''))
                else:
                    newpats.append(pat)
            match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)
            lfile = lambda f: lfutil.standin(f) in manifest
            m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
            m._fileroots = set(m._files)
            origmatchfn = m.matchfn
            m.matchfn = lambda f: (lfutil.isstandin(f) and
                                   (f in manifest) and
                                   origmatchfn(lfutil.splitstandin(f)) or
                                   None)
            return m
        oldmatch = installmatchfn(overridematch)
        listpats = []
        for pat in pats:
            if match_.patkind(pat) is not None:
                listpats.append(pat)
            else:
                listpats.append(makestandin(pat))

        try:
            # Intercept util.copyfile so we can record which standins were
            # copied and refuse to clobber existing largefiles w/o --force.
            origcopyfile = util.copyfile
            copiedfiles = []
            def overridecopyfile(src, dest):
                if (lfutil.shortname in src and
                    dest.startswith(repo.wjoin(lfutil.shortname))):
                    destlfile = dest.replace(lfutil.shortname, '')
                    if not opts['force'] and os.path.exists(destlfile):
                        raise IOError('',
                            _('destination largefile already exists'))
                copiedfiles.append((src, dest))
                origcopyfile(src, dest)

            util.copyfile = overridecopyfile
            result += orig(ui, repo, listpats, opts, rename)
        finally:
            util.copyfile = origcopyfile

        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for (src, dest) in copiedfiles:
            if (lfutil.shortname in src and
                dest.startswith(repo.wjoin(lfutil.shortname))):
                srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
                destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
                destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
                if not os.path.isdir(destlfiledir):
                    os.makedirs(destlfiledir)
                if rename:
                    os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))

                    # The file is gone, but this deletes any empty parent
                    # directories as a side-effect.
                    util.unlinkpath(repo.wjoin(srclfile), True)
                    lfdirstate.remove(srclfile)
                else:
                    util.copyfile(repo.wjoin(srclfile),
                                  repo.wjoin(destlfile))

                    lfdirstate.add(destlfile)
        lfdirstate.write()
    except util.Abort, e:
        if str(e) != _('no files to copy'):
            raise e
        else:
            nolfiles = True
    finally:
        restorematchfn()
        wlock.release()

    if nolfiles and nonormalfiles:
        raise util.Abort(_('no files to copy'))

    return result
698 698
# When the user calls revert, we have to be careful to not revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins update the largefiles.
def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
    """Wrap cmdutil.revert; see the comment above for the approach."""
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        s = lfutil.lfdirstatestatus(lfdirstate, repo)
        lfdirstate.write()
        # Sync standins with the current largefile contents so revert
        # compares against up-to-date hashes.
        for lfile in s.modified:
            lfutil.updatestandin(repo, lfutil.standin(lfile))
        for lfile in s.deleted:
            if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
                os.unlink(repo.wjoin(lfutil.standin(lfile)))

        oldstandins = lfutil.getstandinsstate(repo)

        def overridematch(mctx, pats=[], opts={}, globbed=False,
                default='relpath', badfn=None):
            match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
            m = copy.copy(match)

            # revert supports recursing into subrepos, and though largefiles
            # currently doesn't work correctly in that case, this match is
            # called, so the lfdirstate above may not be the correct one for
            # this invocation of match.
            lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
                                               False)

            def tostandin(f):
                standin = lfutil.standin(f)
                if standin in ctx or standin in mctx:
                    return standin
                elif standin in repo[None] or lfdirstate[f] == 'r':
                    return None
                return f
            m._files = [tostandin(f) for f in m._files]
            m._files = [f for f in m._files if f is not None]
            m._fileroots = set(m._files)
            origmatchfn = m.matchfn
            def matchfn(f):
                if lfutil.isstandin(f):
                    return (origmatchfn(lfutil.splitstandin(f)) and
                            (f in ctx or f in mctx))
                return origmatchfn(f)
            m.matchfn = matchfn
            return m
        oldmatch = installmatchfn(overridematch)
        try:
            orig(ui, repo, ctx, parents, *pats, **opts)
        finally:
            restorematchfn()

        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        # lfdirstate should be 'normallookup'-ed for updated files,
        # because reverting doesn't touch dirstate for 'normal' files
        # when target revision is explicitly specified: in such case,
        # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
        # of target (standin) file.
        lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
                                normallookup=True)

    finally:
        wlock.release()
773 773
774 774 # after pulling changesets, we need to take some extra care to get
775 775 # largefiles updated remotely
def overridepull(orig, ui, repo, source=None, **opts):
    """Wrap pull and, when --lfrev/--all-largefiles was given, cache the
    largefiles needed by the newly pulled revisions.

    Returns the result of the original pull.
    """
    revsprepull = len(repo)
    if not source:
        source = 'default'
    repo.lfpullsource = source
    result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    # Work on a copy so the caller's opts['lfrev'] list is not mutated
    # by the append below.
    lfrevs = list(opts.get('lfrev', []))
    if opts.get('all_largefiles'):
        lfrevs.append('pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull # for pulled() revset expression
        try:
            for rev in scmutil.revrange(repo, lfrevs):
                ui.note(_('pulling largefiles for revision %s\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            del repo.firstpulled
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
798 798
def pulledrevsetsymbol(repo, subset, x):
    """``pulled()``
    Changesets that have just been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

      Some examples:

      - pull largefiles for all new changesets::

          hg pull --lfrev "pulled()"

      - pull largefiles for all new branch heads::

          hg pull --lfrev "head(pulled()) and not closed()"

    """

    try:
        # set by overridepull for the duration of the caching loop
        firstpulled = repo.firstpulled
    except AttributeError:
        raise util.Abort(_("pulled() only available in --lfrev"))
    return revset.baseset([r for r in subset if r >= firstpulled])
824 824
def overrideclone(orig, ui, source, dest=None, **opts):
    """Refuse --all-largefiles clones to non-local destinations, then
    delegate to the original clone."""
    target = dest if dest is not None else hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(target):
        msg = _(
            '--all-largefiles is incompatible with non-local destination %s')
        raise util.Abort(msg % target)

    return orig(ui, source, dest, **opts)
835 835
def hgclone(orig, ui, opts, *args, **kwargs):
    """Post-process hg.clone: enable largefiles in the new clone's hgrc
    when required, and optionally download all largefiles."""
    result = orig(ui, opts, *args, **kwargs)

    if result is None:
        return None

    sourcerepo, destrepo = result
    repo = destrepo.local()

    # When cloning to a remote repo (like through SSH), no repo is available
    # from the peer. Therefore the largefiles can't be downloaded and the
    # hgrc can't be updated.
    if not repo:
        return result

    # If largefiles is required for this repo, permanently enable it locally
    if 'largefiles' in repo.requirements:
        fp = repo.vfs('hgrc', 'a', text=True)
        try:
            fp.write('\n[extensions]\nlargefiles=\n')
        finally:
            fp.close()

    # Caching is implicitly limited to 'rev' option, since the dest repo was
    # truncated at that point. The user may expect a download count with
    # this option, so attempt whether or not this is a largefile repo.
    if opts.get('all_largefiles'):
        success, missing = lfcommands.downloadlfiles(ui, repo, None)
        if missing != 0:
            return None

    return result
867 867
def overriderebase(orig, ui, repo, **opts):
    """Run rebase with largefile status output silenced and commits
    treated as automated (no interactive largefile hooks)."""
    if not util.safehasattr(repo, '_largefilesenabled'):
        # Not a largefiles repo: plain rebase.
        return orig(ui, repo, **opts)

    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    # Suppress per-file largefile status messages during the rebase.
    repo._lfstatuswriters.append(lambda *msg, **kwargs: None)
    try:
        return orig(ui, repo, **opts)
    finally:
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
880 880
def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=None):
    """Archive 'node', substituting largefile contents for standins.

    Largefile data comes from the repo store or the system cache;
    aborts if a largefile is missing from both.
    """
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    if node is not None:
        lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == 'files':
        if prefix:
            raise util.Abort(
                _('cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # Add one member to the archive, honoring matchfn and decode.
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        write('.hg_archival.txt', 0o644, False,
              lambda: archival.buildmetadata(ctx))

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            # Replace the standin with the actual largefile contents.
            if node is not None:
                path = lfutil.findfile(repo, getdata().strip())

                if path is None:
                    raise util.Abort(
                       _('largefile %s not found in repo store or system cache')
                       % lfutil.splitstandin(f))
            else:
                path = lfutil.splitstandin(f)

            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(path, 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn
        write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = match_.narrowmatcher(subpath, matchfn)
            sub.archive(archiver, prefix, submatch)

    archiver.done()
949 949
def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
    """Archive an hg subrepo, substituting largefile contents for
    standins (subrepo counterpart of overridearchive)."""
    repo._get(repo._state + ('hg',))
    rev = repo._state[1]
    ctx = repo._repo[rev]

    if ctx.node() is not None:
        lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())

    def write(name, mode, islink, getdata):
        # At this point, the standin has been replaced with the largefile name,
        # so the normal matcher works here without the lfutil variants.
        # NOTE(review): this tests the enclosing loop variable 'f' rather than
        # 'name'; the loop below always calls write(f, ...), so they coincide,
        # but confirm before reusing write() elsewhere.
        if match and not match(f):
            return
        data = getdata()

        archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            # Replace the standin with the actual largefile contents.
            if ctx.node() is not None:
                path = lfutil.findfile(repo._repo, getdata().strip())

                if path is None:
                    raise util.Abort(
                       _('largefile %s not found in repo store or system cache')
                       % lfutil.splitstandin(f))
            else:
                path = lfutil.splitstandin(f)

            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(os.path.join(prefix, path), 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn

        write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)

    for subpath in sorted(ctx.substate):
        sub = ctx.workingsub(subpath)
        submatch = match_.narrowmatcher(subpath, match)
        sub.archive(archiver, prefix + repo._path + '/', submatch)
1000 1000
1001 1001 # If a largefile is modified, the change is not reflected in its
1002 1002 # standin until a commit. cmdutil.bailifchanged() raises an exception
1003 1003 # if the repo has uncommitted changes. Wrap it to also check if
1004 1004 # largefiles were changed. This is used by bisect, backout and fetch.
def overridebailifchanged(orig, repo, *args, **kwargs):
    """Like cmdutil.bailifchanged, but also abort on uncommitted
    largefile changes."""
    orig(repo, *args, **kwargs)
    repo.lfstatus = True
    status = repo.status()
    repo.lfstatus = False
    dirty = (status.modified or status.added or status.removed
             or status.deleted)
    if dirty:
        raise util.Abort(_('uncommitted changes'))
1012 1012
def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
    """Wrap cmdutil.forget: forget normal files via orig, then forget
    matching largefiles by dropping them from lfdirstate and removing
    their standins.

    Returns the combined (bad, forgot) lists.
    """
    normalmatcher = composenormalfilematcher(match, repo[None].manifest())
    bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
    m = composelargefilematcher(match, repo[None].manifest())

    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=True)
    finally:
        repo.lfstatus = False
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    # Only forget files that actually have a standin in the manifest.
    forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]

    for f in forget:
        if lfutil.standin(f) not in repo.dirstate and not \
                repo.wvfs.isdir(lfutil.standin(f)):
            ui.warn(_('not removing %s: file is already untracked\n')
                    % m.rel(f))
            bad.append(f)

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in forget:
            if lfdirstate[f] == 'a':
                lfdirstate.drop(f)
            else:
                lfdirstate.remove(f)
        lfdirstate.write()
        standins = [lfutil.standin(f) for f in forget]
        for f in standins:
            util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        rejected = repo[None].forget(standins)
    finally:
        wlock.release()

    bad.extend(f for f in rejected if f in m.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
1058 1058
def _getoutgoings(repo, other, missing, addfunc):
    """get pairs of filename and largefile hash in outgoing revisions
    in 'missing'.

    largefiles already existing on 'other' repository are ignored.

    'addfunc' is invoked with each unique pairs of filename and
    largefile hash value.
    """
    seen = set()
    hashes = set()
    def dedup(fn, lfhash):
        key = (fn, lfhash)
        if key in seen:
            return
        seen.add(key)
        hashes.add(lfhash)
    lfutil.getlfilestoupload(repo, missing, dedup)
    if not hashes:
        return
    # One batched existence query against the remote store.
    lfexists = basestore._openstore(repo, other).exists(hashes)
    for fn, lfhash in seen:
        if not lfexists[lfhash]: # lfhash doesn't exist on "other"
            addfunc(fn, lfhash)
1081 1081
def outgoinghook(ui, repo, other, opts, missing):
    """'hg outgoing --large' hook: list the largefiles that would be
    uploaded; with --debug, also list each file's largefile hashes."""
    if opts.pop('large', None):
        lfhashes = set()
        if ui.debugflag:
            # file -> list of hashes, so each hash can be shown
            toupload = {}
            def addfunc(fn, lfhash):
                if fn not in toupload:
                    toupload[fn] = []
                toupload[fn].append(lfhash)
                lfhashes.add(lfhash)
            def showhashes(fn):
                for lfhash in sorted(toupload[fn]):
                    ui.debug(' %s\n' % (lfhash))
        else:
            toupload = set()
            def addfunc(fn, lfhash):
                toupload.add(fn)
                lfhashes.add(lfhash)
            def showhashes(fn):
                pass
        _getoutgoings(repo, other, missing, addfunc)

        if not toupload:
            ui.status(_('largefiles: no files to upload\n'))
        else:
            ui.status(_('largefiles to upload (%d entities):\n')
                      % (len(lfhashes)))
            for file in sorted(toupload):
                ui.status(lfutil.splitstandin(file) + '\n')
                showhashes(file)
            ui.status('\n')
1113 1113
def summaryremotehook(ui, repo, opts, changes):
    """'hg summary --remote --large' hook.

    When changes is None, returns a (local-check, remote-check) boolean
    pair — presumably the summary hook's capability-query phase (confirm
    against the summary hook protocol). Otherwise prints the largefiles
    upload summary line.
    """
    largeopt = opts.get('large', False)
    if changes is None:
        if largeopt:
            return (False, True) # only outgoing check is needed
        else:
            return (False, False)
    elif largeopt:
        url, branch, peer, outgoing = changes[1]
        if peer is None:
            # i18n: column positioning for "hg summary"
            ui.status(_('largefiles: (no remote repo)\n'))
            return

        toupload = set()
        lfhashes = set()
        def addfunc(fn, lfhash):
            toupload.add(fn)
            lfhashes.add(lfhash)
        _getoutgoings(repo, peer, outgoing.missing, addfunc)

        if not toupload:
            # i18n: column positioning for "hg summary"
            ui.status(_('largefiles: (no files to upload)\n'))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('largefiles: %d entities for %d files to upload\n')
                      % (len(lfhashes), len(toupload)))
1142 1142
def overridesummary(orig, ui, repo, *pats, **opts):
    """Run summary with largefile-aware status reporting enabled,
    restoring the flag afterwards."""
    repo.lfstatus = True
    try:
        orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
1149 1149
def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
                     similarity=None):
    """Wrap scmutil.addremove: remove missing largefiles, add files that
    qualify as largefiles, then run the original addremove on whatever
    normal files remain."""
    if not lfutil.islfilesrepo(repo):
        return orig(repo, matcher, prefix, opts, dry_run, similarity)
    # Get the list of missing largefiles so we can remove them
    lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
    unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
                                  False, False, False)

    # Call into the normal remove code, but the removing of the standin, we want
    # to have handled by original addremove. Monkey patching here makes sure
    # we don't remove the standin in the largefiles code, preventing a very
    # confused state later.
    if s.deleted:
        m = copy.copy(matcher)

        # The m._files and m._map attributes are not changed to the deleted list
        # because that affects the m.exact() test, which in turn governs whether
        # or not the file name is printed, and how. Simply limit the original
        # matches to those in the deleted status list.
        matchfn = m.matchfn
        m.matchfn = lambda f: f in s.deleted and matchfn(f)

        removelargefiles(repo.ui, repo, True, m, **opts)
    # Call into the normal add code, and any files that *should* be added as
    # largefiles will be
    added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
    # Now that we've handled largefiles, hand off to the original addremove
    # function to take care of the rest. Make sure it doesn't do anything with
    # largefiles by passing a matcher that will ignore them.
    matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
    return orig(repo, matcher, prefix, opts, dry_run, similarity)
1182 1182
# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
def overridepurge(orig, ui, repo, *dirs, **opts):
    '''Run purge with repo.status patched to hide largefiles.

    The patched status() drops files tracked by the largefiles dirstate
    from the unknown/ignored lists so that ``purge --all`` does not
    delete them.  repo.status is always restored afterwards, even when
    the wrapped purge raises (previously a failure left the monkey
    patch in place).
    '''
    # XXX Monkey patching a repoview will not work. The assigned attribute will
    # be set on the unfiltered repo, but we will only lookup attributes in the
    # unfiltered repo if the lookup in the repoview object itself fails. As the
    # monkey patched method exists on the repoview class the lookup will not
    # fail. As a result, the original version will shadow the monkey patched
    # one, defeating the monkey patch.
    #
    # As a work around we use an unfiltered repo here. We should do something
    # cleaner instead.
    repo = repo.unfiltered()
    oldstatus = repo.status
    def overridestatus(node1='.', node2=None, match=None, ignored=False,
                       clean=False, unknown=False, listsubrepos=False):
        # delegate to the real status, then strip largefiles (entries
        # not marked '?' in the largefiles dirstate) from the
        # unknown/ignored lists so purge will not touch them
        r = oldstatus(node1, node2, match, ignored, clean, unknown,
                      listsubrepos)
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        unknown = [f for f in r.unknown if lfdirstate[f] == '?']
        ignored = [f for f in r.ignored if lfdirstate[f] == '?']
        return scmutil.status(r.modified, r.added, r.removed, r.deleted,
                              unknown, ignored, r.clean)
    repo.status = overridestatus
    try:
        orig(ui, repo, *dirs, **opts)
    finally:
        # always undo the monkey patch, even if purge failed
        repo.status = oldstatus
def overriderollback(orig, ui, repo, **opts):
    '''Run rollback, then resynchronize largefile standins and the
    largefiles dirstate with the rolled-back dirstate parents.'''
    wlock = repo.wlock()
    try:
        before = repo.dirstate.parents()
        # standins currently tracked (not scheduled for removal); any of
        # these that no longer exist after the rollback are orphans
        orphans = set(f for f in repo.dirstate
                      if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
        result = orig(ui, repo, **opts)
        after = repo.dirstate.parents()
        if before == after:
            return result # no need to restore standins

        pctx = repo['.']
        for f in repo.dirstate:
            if lfutil.isstandin(f):
                orphans.discard(f)
                if repo.dirstate[f] == 'r':
                    # removed after rollback: drop the standin file
                    repo.wvfs.unlinkpath(f, ignoremissing=True)
                elif f in pctx:
                    # restore standin content from the new parent
                    fctx = pctx[f]
                    repo.wwrite(f, fctx.data(), fctx.flags())
                else:
                    # content of standin is not so important in 'a',
                    # 'm' or 'n' (coming from the 2nd parent) cases
                    lfutil.writestandin(repo, f, '', False)
        for standin in orphans:
            # standins no longer present in the dirstate at all
            repo.wvfs.unlinkpath(standin, ignoremissing=True)

        # resync the largefiles dirstate, dropping entries whose
        # largefile disappeared with the rollback
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        orphans = set(lfdirstate)
        lfiles = lfutil.listlfiles(repo)
        for file in lfiles:
            lfutil.synclfdirstate(repo, lfdirstate, file, True)
            orphans.discard(file)
        for lfile in orphans:
            lfdirstate.drop(lfile)
        lfdirstate.write()
    finally:
        wlock.release()
    return result
1248 1248
def overridetransplant(orig, ui, repo, *revs, **opts):
    '''Run transplant with an automated largefile commit hook installed
    and per-file largefile status output suppressed.'''
    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
    # silence largefile status messages for the duration of the command
    repo._lfstatuswriters.append(lambda *msg, **opts: None)
    try:
        result = orig(ui, repo, *revs, **opts)
    finally:
        # pop in reverse order of installation
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
    return result
1259 1259
def overridecat(orig, ui, repo, file1, *pats, **opts):
    '''Implementation of "hg cat" that can emit largefile contents,
    fetching them from a store into the user cache when necessary.

    Returns 0 if at least one file was written, 1 otherwise.
    '''
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    err = 1
    notbad = set()
    m = scmutil.match(ctx, (file1,) + pats, opts)
    origmatchfn = m.matchfn
    def lfmatchfn(f):
        # also match the standin corresponding to a requested largefile
        if origmatchfn(f):
            return True
        lf = lfutil.splitstandin(f)
        if lf is None:
            return False
        notbad.add(lf)
        return origmatchfn(lf)
    m.matchfn = lfmatchfn
    origbadfn = m.bad
    def lfbadfn(f, msg):
        # suppress "no such file" for names matched via their standin
        if not f in notbad:
            origbadfn(f, msg)
    m.bad = lfbadfn

    origvisitdirfn = m.visitdir
    def lfvisitdirfn(dir):
        # ensure the matcher descends into the standin directory
        if dir == lfutil.shortname:
            return True
        ret = origvisitdirfn(dir)
        if ret:
            return ret
        lf = lfutil.splitstandin(dir)
        if lf is None:
            return False
        return origvisitdirfn(lf)
    m.visitdir = lfvisitdirfn

    for f in ctx.walk(m):
        fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
                                 pathname=f)
        lf = lfutil.splitstandin(f)
        if lf is None or origmatchfn(f):
            # duplicating unreachable code from commands.cat
            data = ctx[f].data()
            if opts.get('decode'):
                data = repo.wwritedata(f, data)
            fp.write(data)
        else:
            # f is a standin: emit the largefile, fetching it into the
            # user cache first if it is not already there
            hash = lfutil.readstandin(repo, lf, ctx.rev())
            if not lfutil.inusercache(repo.ui, hash):
                store = basestore._openstore(repo)
                success, missing = store.get([(lf, hash)])
                if len(success) != 1:
                    raise util.Abort(
                        _('largefile %s is not in cache and could not be '
                          'downloaded')  % lf)
            path = lfutil.usercachepath(repo.ui, hash)
            fpin = open(path, "rb")
            for chunk in util.filechunkiter(fpin, 128 * 1024):
                fp.write(chunk)
            fpin.close()
        fp.close()
        err = 0
    return err
1321 1321
def mergeupdate(orig, repo, node, branchmerge, force, partial,
                *args, **kwargs):
    '''Wrap merge.update to keep largefile standins and the largefiles
    dirstate in sync across updates and merges.'''
    wlock = repo.wlock()
    try:
        # branch |       |         |
        #  merge | force | partial | action
        # -------+-------+---------+--------------
        #    x   |   x   |    x    | linear-merge
        #    o   |   x   |    x    | branch-merge
        #    x   |   o   |    x    | overwrite (as clean update)
        #    o   |   o   |    x    | force-branch-merge (*1)
        #    x   |   x   |    o    |   (*)
        #    o   |   x   |    o    |   (*)
        #    x   |   o   |    o    | overwrite (as revert)
        #    o   |   o   |    o    |   (*)
        #
        # (*) don't care
        # (*1) deprecated, but used internally (e.g: "rebase --collapse")

        # before updating, refresh standins for locally modified/added
        # largefiles so the update sees current content hashes
        lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
        unsure, s = lfdirstate.status(match_.always(repo.root,
                                                    repo.getcwd()),
                                      [], False, False, False)
        pctx = repo['.']
        for lfile in unsure + s.modified:
            lfileabs = repo.wvfs.join(lfile)
            if not os.path.exists(lfileabs):
                continue
            lfhash = lfutil.hashrepofile(repo, lfile)
            standin = lfutil.standin(lfile)
            lfutil.writestandin(repo, standin, lfhash,
                                lfutil.getexecutable(lfileabs))
            if (standin in pctx and
                lfhash == lfutil.readstandin(repo, lfile, '.')):
                # content matches the parent after all: mark clean
                lfdirstate.normal(lfile)
        for lfile in s.added:
            lfutil.updatestandin(repo, lfutil.standin(lfile))
        lfdirstate.write()

        oldstandins = lfutil.getstandinsstate(repo)

        result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)

        # update only the largefiles whose standins actually changed
        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        if branchmerge or force or partial:
            filelist.extend(s.deleted + s.removed)

        lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
                                normallookup=partial)

        return result
    finally:
        wlock.release()
1376 1376
def scmutilmarktouched(orig, repo, files, *args, **kwargs):
    '''Wrap scmutil.marktouched so that touched standins also refresh
    the corresponding largefiles in the working directory.'''
    result = orig(repo, files, *args, **kwargs)

    # translate touched standins back to their largefile names
    filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
    if filelist:
        lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
                                printmessage=False, normallookup=True)

    return result
@@ -1,332 +1,332
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import match as matchmod
10 10 import cmdutil
11 11 import scmutil, util, encoding
12 12 import cStringIO, os, tarfile, time, zipfile
13 13 import zlib, gzip
14 14 import struct
15 15 import error
16 16
17 17 # from unzip source code:
18 18 _UNX_IFREG = 0x8000
19 19 _UNX_IFLNK = 0xa000
20 20
def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.

    Raises util.Abort if the prefix could escape the extraction
    directory (absolute path or any '..' component).'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        # default prefix: the archive file name minus its extension
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in exts.get(kind, []):
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # Drop the leading '.' path component if present, so Windows can read the
    # zip files (issue4634)
    if prefix.startswith('./'):
        prefix = prefix[2:]
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise util.Abort(_('archive prefix contains illegal components'))
    return prefix
47 47
# archive kind -> file extensions recognized for it; used both for
# guessing the kind from a destination name and for stripping the
# extension when computing the default archive prefix
exts = {
    'tar': ['.tar'],
    'tbz2': ['.tbz2', '.tar.bz2'],
    'tgz': ['.tgz', '.tar.gz'],
    'zip': ['.zip'],
    }
54 54
def guesskind(dest):
    '''Infer the archive type from the extension of *dest*.

    Returns a key of the ``exts``/``archivers`` tables (e.g. 'tgz'),
    or None when no known archive extension matches.
    '''
    for kind, extensions in exts.iteritems():
        for ext in extensions:
            if dest.endswith(ext):
                return kind
    return None
60 60
def _rootctx(repo):
    '''Return the first visible changectx of the repo (or the null
    context for an empty/fully-hidden repo).'''
    # repo[0] may be hidden
    for rev in repo:
        # first iteration yields the lowest visible revision
        return repo[rev]
    return repo['null']
66 66
def buildmetadata(ctx):
    '''build content of .hg_archival.txt

    The file records the root node, archived node, branch and tag
    information of the archived changeset.'''
    repo = ctx.repo()
    hex = ctx.hex()
    if ctx.rev() is None:
        # working-directory context: identify by the first parent,
        # with a '+' suffix when there are local modifications
        hex = ctx.p1().hex()
        if ctx.dirty():
            hex += '+'

    base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
        _rootctx(repo).hex(), hex, encoding.fromlocal(ctx.branch()))

    tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                   if repo.tagtype(t) == 'global')
    if not tags:
        # no global tag on this revision: record the latest tag(s),
        # their distance, and the number of changes since then
        repo.ui.pushbuffer()
        opts = {'template': '{latesttag}\n{latesttagdistance}',
                'style': '', 'patch': None, 'git': None}
        cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
        ltags, dist = repo.ui.popbuffer().split('\n')
        ltags = ltags.split(':')
        changessince = len(repo.revs('only(.,%s)', ltags[0]))
        tags = ''.join('latesttag: %s\n' % t for t in ltags)
        tags += 'latesttagdistance: %s\n' % dist
        tags += 'changessincelatesttag: %s\n' % changessince

    return base + tags
94 94
class tarit(object):
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):
        # GzipFile variant that lets the caller pin the timestamp stored
        # in the gzip header, so archives are reproducible for a given
        # mtime instead of embedding the current wall-clock time.

        def __init__(self, *args, **kw):
            timestamp = None
            if 'timestamp' in kw:
                timestamp = kw.pop('timestamp')
            if timestamp is None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            # reimplements gzip.GzipFile._write_gzip_header, writing
            # self.timestamp in place of time.time()
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            # Python 2.6 introduced self.name and deprecated self.filename
            try:
                fname = self.name
            except AttributeError:
                fname = self.filename
            if fname and fname.endswith('.gz'):
                fname = fname[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')
            self.fileobj.write('\377')
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, mtime, kind=''):
        # dest: file name or writable file-like object
        # mtime: modification time forced onto every archive member
        # kind: '' (uncompressed), 'gz' or 'bz2'
        self.mtime = mtime
        self.fileobj = None

        def taropen(name, mode, fileobj=None):
            if kind == 'gz':
                # open the gzip layer ourselves so we control the header
                # timestamp (see GzipFileWithTime above)
                mode = mode[0]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(name, mode, gzfileobj)
            else:
                return tarfile.open(name, mode + kind, fileobj)

        if isinstance(dest, str):
            self.z = taropen(dest, mode='w:')
        else:
            # Python 2.5-2.5.1 have a regression that requires a name arg
            self.z = taropen(name='', mode='w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        # add one member; for symlinks, data is the link target
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = data
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = cStringIO.StringIO(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
        if self.fileobj:
            # close the gzip layer we opened ourselves
            self.fileobj.close()
173 173
class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.

    Wraps a write-only file object, counting the bytes written so
    tell() can report the current offset; every other attribute is
    delegated to the wrapped object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        # delegate anything we do not define to the wrapped file object
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset = self.offset + len(s)

    def tell(self):
        return self.offset
191 191
class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                # stream without a usable tell(): wrap it so zipfile
                # can track offsets (e.g. an http response object)
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if mtime < epoch:
            mtime = epoch

        self.mtime = mtime
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        # add one member; for symlinks, data is the link target
        i = zipfile.ZipInfo(name, self.date_time)
        i.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = _UNX_IFREG
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        # unix mode and file type live in the high 16 bits
        i.external_attr = (mode | ftype) << 16L
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        i.extra += struct.pack('<hhBl',
                               0x5455,     # block type: "extended-timestamp"
                               1 + 4,      # size of this block
                               1,          # "modification time is present"
                               int(self.mtime)) # last modification (UTC)
        self.z.writestr(i, data)

    def done(self):
        self.z.close()
238 238
class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        # mtime is accepted for interface parity with tarit/zipit but is
        # unused here: the filesystem keeps its own timestamps
        self.basedir = name
        self.opener = scmutil.opener(self.basedir)

    def addfile(self, name, mode, islink, data):
        if islink:
            self.opener.symlink(data, name)
            return
        # write atomically, then apply the requested permissions
        f = self.opener(name, "w", atomictemp=True)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)

    def done(self):
        pass
258 258
# archive kind -> archiver factory; each factory takes (dest, mtime)
# and returns an object with addfile()/done() methods
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),
    'zip': zipit,
    }
267 267
def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    mtime, if given, overrides the changeset commit time used for
    member timestamps.  subrepos tells whether to recurse into
    subrepositories.  Returns the number of files archived; raises
    error.Abort when no file matched.'''

    if kind == 'files':
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # fetch content lazily so skipped files cost nothing
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            # executable files get 0o755; 'l' flag marks symlinks
            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.narrowmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch)

    if total == 0:
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
@@ -1,6539 +1,6539
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _
11 11 import os, re, difflib, time, tempfile, errno, shlex
12 12 import sys, socket
13 13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 14 import patch, help, encoding, templatekw, discovery
15 15 import archival, changegroup, cmdutil, hbisect
16 16 import sshserver, hgweb, commandserver
17 17 import extensions
18 18 from hgweb import server as hgweb_server
19 19 import merge as mergemod
20 20 import minirst, revset, fileset
21 21 import dagparser, context, simplemerge, graphmod, copies
22 22 import random
23 23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
24 24 import phases, obsolete, exchange, bundle2, repair
25 25 import ui as uimod
26 26
27 27 table = {}
28 28
29 29 command = cmdutil.command(table)
30 30
31 31 # Space delimited list of commands that don't require local repositories.
32 32 # This should be populated by passing norepo=True into the @command decorator.
33 33 norepo = ''
34 34 # Space delimited list of commands that optionally require local repositories.
35 35 # This should be populated by passing optionalrepo=True into the @command
36 36 # decorator.
37 37 optionalrepo = ''
38 38 # Space delimited list of commands that will examine arguments looking for
39 39 # a repository. This should be populated by passing inferrepo=True into the
40 40 # @command decorator.
41 41 inferrepo = ''
42 42
43 43 # label constants
44 44 # until 3.5, bookmarks.current was the advertised name, not
45 45 # bookmarks.active, so we must use both to avoid breaking old
46 46 # custom styles
47 47 activebookmarklabel = 'bookmarks.active bookmarks.current'
48 48
49 49 # common command options
50 50
51 51 globalopts = [
52 52 ('R', 'repository', '',
53 53 _('repository root directory or name of overlay bundle file'),
54 54 _('REPO')),
55 55 ('', 'cwd', '',
56 56 _('change working directory'), _('DIR')),
57 57 ('y', 'noninteractive', None,
58 58 _('do not prompt, automatically pick the first choice for all prompts')),
59 59 ('q', 'quiet', None, _('suppress output')),
60 60 ('v', 'verbose', None, _('enable additional output')),
61 61 ('', 'config', [],
62 62 _('set/override config option (use \'section.name=value\')'),
63 63 _('CONFIG')),
64 64 ('', 'debug', None, _('enable debugging output')),
65 65 ('', 'debugger', None, _('start debugger')),
66 66 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
67 67 _('ENCODE')),
68 68 ('', 'encodingmode', encoding.encodingmode,
69 69 _('set the charset encoding mode'), _('MODE')),
70 70 ('', 'traceback', None, _('always print a traceback on exception')),
71 71 ('', 'time', None, _('time how long the command takes')),
72 72 ('', 'profile', None, _('print command execution profile')),
73 73 ('', 'version', None, _('output version information and exit')),
74 74 ('h', 'help', None, _('display help and exit')),
75 75 ('', 'hidden', False, _('consider hidden changesets')),
76 76 ]
77 77
78 78 dryrunopts = [('n', 'dry-run', None,
79 79 _('do not perform actions, just print output'))]
80 80
81 81 remoteopts = [
82 82 ('e', 'ssh', '',
83 83 _('specify ssh command to use'), _('CMD')),
84 84 ('', 'remotecmd', '',
85 85 _('specify hg command to run on the remote side'), _('CMD')),
86 86 ('', 'insecure', None,
87 87 _('do not verify server certificate (ignoring web.cacerts config)')),
88 88 ]
89 89
90 90 walkopts = [
91 91 ('I', 'include', [],
92 92 _('include names matching the given patterns'), _('PATTERN')),
93 93 ('X', 'exclude', [],
94 94 _('exclude names matching the given patterns'), _('PATTERN')),
95 95 ]
96 96
97 97 commitopts = [
98 98 ('m', 'message', '',
99 99 _('use text as commit message'), _('TEXT')),
100 100 ('l', 'logfile', '',
101 101 _('read commit message from file'), _('FILE')),
102 102 ]
103 103
104 104 commitopts2 = [
105 105 ('d', 'date', '',
106 106 _('record the specified date as commit date'), _('DATE')),
107 107 ('u', 'user', '',
108 108 _('record the specified user as committer'), _('USER')),
109 109 ]
110 110
111 111 # hidden for now
112 112 formatteropts = [
113 113 ('T', 'template', '',
114 114 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
115 115 ]
116 116
117 117 templateopts = [
118 118 ('', 'style', '',
119 119 _('display using template map file (DEPRECATED)'), _('STYLE')),
120 120 ('T', 'template', '',
121 121 _('display with template'), _('TEMPLATE')),
122 122 ]
123 123
124 124 logopts = [
125 125 ('p', 'patch', None, _('show patch')),
126 126 ('g', 'git', None, _('use git extended diff format')),
127 127 ('l', 'limit', '',
128 128 _('limit number of changes displayed'), _('NUM')),
129 129 ('M', 'no-merges', None, _('do not show merges')),
130 130 ('', 'stat', None, _('output diffstat-style summary of changes')),
131 131 ('G', 'graph', None, _("show the revision DAG")),
132 132 ] + templateopts
133 133
134 134 diffopts = [
135 135 ('a', 'text', None, _('treat all files as text')),
136 136 ('g', 'git', None, _('use git extended diff format')),
137 137 ('', 'nodates', None, _('omit dates from diff headers'))
138 138 ]
139 139
140 140 diffwsopts = [
141 141 ('w', 'ignore-all-space', None,
142 142 _('ignore white space when comparing lines')),
143 143 ('b', 'ignore-space-change', None,
144 144 _('ignore changes in the amount of white space')),
145 145 ('B', 'ignore-blank-lines', None,
146 146 _('ignore changes whose lines are all blank')),
147 147 ]
148 148
149 149 diffopts2 = [
150 150 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
151 151 ('p', 'show-function', None, _('show which function each change is in')),
152 152 ('', 'reverse', None, _('produce a diff that undoes the changes')),
153 153 ] + diffwsopts + [
154 154 ('U', 'unified', '',
155 155 _('number of lines of context to show'), _('NUM')),
156 156 ('', 'stat', None, _('output diffstat-style summary of changes')),
157 157 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
158 158 ]
159 159
160 160 mergetoolopts = [
161 161 ('t', 'tool', '', _('specify merge tool')),
162 162 ]
163 163
164 164 similarityopts = [
165 165 ('s', 'similarity', '',
166 166 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
167 167 ]
168 168
169 169 subrepoopts = [
170 170 ('S', 'subrepos', None,
171 171 _('recurse into subrepositories'))
172 172 ]
173 173
174 174 # Commands start here, listed alphabetically
175 175
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository.

    .. container:: verbose

       An example showing how new (unknown) files are added
       automatically by :hg:`add`::

         $ ls
         foo.c
         $ hg status
         ? foo.c
         $ hg add
         adding foo.c
         $ hg status
         A foo.c

    Returns 0 if all files are successfully added.
    """

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.add(ui, repo, m, "", False, **opts)
    # a non-empty rejected list means at least one file failed to be added
    return rejected and 1 or 0
211 211
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    ``.hgignore``. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    Returns 0 if all files are successfully added.
    """
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    # scmutil.addremove expects similarity as a fraction in [0, 1]
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
246 246
247 247 @command('^annotate|blame',
248 248 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
249 249 ('', 'follow', None,
250 250 _('follow copies/renames and list the filename (DEPRECATED)')),
251 251 ('', 'no-follow', None, _("don't follow copies and renames")),
252 252 ('a', 'text', None, _('treat all files as text')),
253 253 ('u', 'user', None, _('list the author (long with -v)')),
254 254 ('f', 'file', None, _('list the filename')),
255 255 ('d', 'date', None, _('list the date (short with -q)')),
256 256 ('n', 'number', None, _('list the revision number (default)')),
257 257 ('c', 'changeset', None, _('list the changeset')),
258 258 ('l', 'line-number', None, _('show line number at the first appearance'))
259 259 ] + diffwsopts + walkopts + formatteropts,
260 260 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
261 261 inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    fm = ui.formatter('annotate', opts)
    # quiet mode shortens dates; otherwise use the full date format
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        # annotating the working directory: lines introduced by uncommitted
        # changes have no committed node/rev, so the helpers below must
        # accept None and render such lines with a '+' suffix (suppressed
        # for node hashes when -c/--changeset is given)
        def hexfn(node):
            if node is None:
                return None
            else:
                return fm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % fm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        hexfn = fm.hexfunc
        formatrev = formathex = str

    # one entry per displayable column:
    # (option name, separator, value extractor, plain-text formatter)
    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    # formatter field names differ from the option names for these two
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    # with a real (non-plain) formatter, emit raw values and let the
    # formatter render them; otherwise pre-format each value to text
    if fm:
        def makefunc(get, fmt):
            return get
    else:
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise util.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        # formats[i][j] / pieces[i][j] hold the format string and the value
        # for column i on output line j; they are zipped back together below
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                if fm:
                    formats.append(['%s' for x in l])
                else:
                    # right-align each column to its widest entry
                    sizes = [encoding.colwidth(x) for x in l]
                    ml = max(sizes)
                    formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
                pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        # make sure the last line of the file ends the output cleanly
        if lines and not lines[-1][1].endswith('\n'):
            fm.plain('\n')

    fm.end()
389 389
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    # resolve the revision to archive (working dir parent by default)
    revctx = scmutil.revsingle(repo, opts.get('rev'))
    if not revctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = revctx.node()

    # expand format-string placeholders in the destination name
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    archivetype = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # streaming to stdout only works for single-file archive formats
        if archivetype == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matcher = scmutil.match(revctx, [], opts)
    decode = not opts.get('no_decode')
    archival.archive(repo, dest, node, archivetype, decode, matcher,
                     prefix, subrepos=opts.get('subrepos'))
461 461
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None, _('commit if no conflicts were encountered')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, commit=False, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::

      backout cannot be used to fix either an unwanted or
      incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # the revision may arrive as either the positional arg or -r/--rev,
    # but not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to run with an interrupted operation or local modifications
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise util.Abort(_('cannot backout change that is not an ancestor'))

    # pick the parent to diff against: for a merge changeset the user
    # must name one of the two parents via --parent (deprecated)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    wlock = repo.wlock()
    try:
        branch = repo.dirstate.branch()
        bheads = repo.branchheads(branch)
        rctx = scmutil.revsingle(repo, hex(parent))
        if not opts.get('merge') and op1 != node:
            # linear backout: merge the reversal into the working directory
            # while keeping the original dirstate parents
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                repo.dirstate.beginparentchange()
                stats = mergemod.update(repo, parent, True, True, False,
                                        node, False)
                # restore the pre-merge parents so the result looks like a
                # plain working-directory modification
                repo.setparents(op1, op2)
                repo.dirstate.endparentchange()
                hg._showstats(repo, stats)
                if stats[3]:
                    # stats[3] counts unresolved files
                    repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                     "file merges\n"))
                    return 1
                elif not commit:
                    msg = _("changeset %s backed out, "
                            "don't forget to commit.\n")
                    ui.status(msg % short(node))
                    return 0
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
        else:
            # backing out the working directory parent (or --merge):
            # check out REV and revert it to its chosen parent
            hg.clean(repo, node, show_stats=False)
            repo.dirstate.setbranch(branch)
            cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())


        def commitfunc(ui, repo, message, match, opts):
            editform = 'backout'
            e = cmdutil.getcommiteditor(editform=editform, **opts)
            if not message:
                # we don't translate commit messages
                message = "Backed out changeset %s" % short(node)
                e = cmdutil.getcommiteditor(edit=True, editform=editform)
            return repo.commit(message, opts.get('user'), opts.get('date'),
                               match, editor=e)
        newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
        if not newnode:
            ui.status(_("nothing changed\n"))
            return 1
        cmdutil.commitstatus(repo, newnode, branch, bheads)

        # "rev:shorthash" label used in status messages below
        def nice(node):
            return '%d:%s' % (repo.changelog.rev(node), short(node))
        ui.status(_('changeset %s backs out changeset %s\n') %
                  (nice(repo.changelog.tip()), nice(node)))
        if opts.get('merge') and op1 != node:
            # --merge: go back to the old dirstate parent and merge the
            # freshly committed backout into it
            hg.clean(repo, op1, show_stats=False)
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                return hg.merge(repo, hex(repo.changelog.tip()))
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
    finally:
        wlock.release()
    return 0
605 605
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revsets` for more about the `bisect()` keyword.

    Returns 0 on success.
    """
    # return the common ancestor to restart from when the bisection ended
    # on a merge with an unchecked parent, or None if no extension needed
    def extendbisectrange(nodes, good):
        # bisect is incomplete when it ends on a merge node and
        # one of the parent was not checked.
        parents = repo[nodes[0]].parents()
        if len(parents) > 1:
            if good:
                side = state['bad']
            else:
                side = state['good']
            num = len(set(i.node() for i in parents) & set(side))
            if num == 1:
                return parents[0].ancestor(parents[1])
        return None

    # display the changeset(s) identified as the first good/bad revision
    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_('Not all ancestors of this changeset have been'
                           ' checked.\nUse bisect --extend to continue the '
                           'bisection from\nthe common ancestor, %s.\n')
                         % extendnode)
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                        "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                        "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])
        displayer.close()

    # abort unless at least one good and one bad revision are known;
    # in interactive mode a state being updated right now is tolerated
    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    cmdutil.checkunfinished(repo)

    if reset:
        # forget the bisection by removing the state file
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        # automatic mode: run the command repeatedly, mapping its exit
        # status to good/bad/skip, until the culprit is isolated
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise util.Abort(_('current bisect revision is unknown - '
                                   'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('current bisect revision is a merge'))
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)})
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = scmutil.revsingle(repo, rev, node)
                rev = None # clear for future iterations
                state[transition].append(ctx.node())
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                if not noupdate:
                    cmdutil.bailifchanged(repo)
                    hg.clean(repo, node, show_stats=False)
        finally:
            # always record where the bisection stopped
            state['current'] = [node]
            hbisect.save_state(repo, state)
        print_result(nodes, bgood)
        return

    # update state

    if rev:
        nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
    else:
        nodes = [repo.lookup('.')]

    if good or bad or skip:
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                if noupdate:
                    return
                cmdutil.bailifchanged(repo)
                return hg.clean(repo, extendnode.node())
        raise util.Abort(_("nothing to extend"))

    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        if not noupdate:
            cmdutil.bailifchanged(repo)
            return hg.clean(repo, node)
873 873
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    # strip and validate a bookmark name; aborts on an invalid one
    def checkformat(mark):
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    # refuse to shadow an existing bookmark or branch name unless --force;
    # as a special case, moving a bookmark to a descendant (or resolving a
    # divergent bookmark) is allowed without --force
    # NOTE: reads 'marks' from the enclosing scope, bound further below
    def checkconflict(repo, mark, cur, force=False, target=None):
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    # reject option combinations that make no sense together
    if delete and rename:
        raise util.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise util.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise util.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise util.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        # any mutating mode needs the working-directory lock
        wlock = repo.wlock()
        try:
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                for mark in names:
                    if mark not in marks:
                        raise util.Abort(_("bookmark '%s' does not exist") %
                                         mark)
                    if mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                    del marks[mark]
                marks.write()

            elif rename:
                if not names:
                    raise util.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise util.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise util.Abort(_("bookmark '%s' does not exist") % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                # keep the renamed bookmark active if the old name was active
                if repo._activebookmark == rename and not inactive:
                    bookmarks.activate(repo, mark)
                del marks[rename]
                marks.write()

            elif names:
                # create/move one or more bookmarks; the first one named
                # becomes the candidate for activation
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    if newact is None:
                        newact = mark
                    if inactive and mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.activate(repo, newact)
                elif cur != tgt and newact == repo._activebookmark:
                    bookmarks.deactivate(repo)
                marks.write()

            elif inactive:
                # bare --inactive: deactivate the current bookmark, if any
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
        finally:
            wlock.release()
    else: # show bookmarks
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            active = repo._activebookmark
            if bmark == active:
                prefix, label = '*', activebookmarklabel
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            pad = " " * (25 - encoding.colwidth(bmark))
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
                         repo.changelog.rev(n), hexfn(n), label=label)
            fm.data(active=(bmark == active))
            fm.plain('\n')
        fm.end()
1060 1060
@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of the branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    if label:
        label = label.strip()

    # with neither a new name nor --clean, just report the current branch
    if not label and not opts.get('clean'):
        ui.write("%s\n" % repo.dirstate.branch())
        return

    wlock = repo.wlock()
    try:
        if opts.get('clean'):
            # revert to the branch of the working directory's first parent
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            if not opts.get('force') and label in repo.branchmap():
                # shadowing an existing branch is only OK when we are
                # already on it (e.g. re-setting the same name)
                if all(p.branch() != label for p in repo.parents()):
                    raise util.Abort(_('a branch of the same name already'
                                       ' exists'),
                                     # i18n: "it" refers to an existing branch
                                     hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # hint at bookmarks when no open non-default branch exists yet
            anyopen = any(n != "default" and not c
                          for n, h, t, c in repo.branchmap().iterbranches())
            if not anyopen:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))
    finally:
        wlock.release()
1127 1127
@command('branches',
    [('a', 'active', False,
     _('show only branches that have unmerged heads (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-ac]'))
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    # a branch is "active" when it is open and owns at least one repo head
    allheads = set(repo.heads())
    entries = [(name, repo[tip],
                not isclosed and bool(set(heads) & allheads),
                not isclosed)
               for name, heads, tip, isclosed
               in repo.branchmap().iterbranches()]
    # active branches first, then by descending tip revision
    entries.sort(key=lambda e: (e[2], e[1].rev(), e[0], e[3]),
                 reverse=True)

    currentbranch = repo.dirstate.branch()
    for name, tipctx, isactive, isopen in entries:
        if active and not isactive:
            continue
        if isactive:
            label, notice = 'branches.active', ''
        elif not isopen:
            if not closed:
                continue
            label, notice = 'branches.closed', _(' (closed)')
        else:
            label, notice = 'branches.inactive', _(' (inactive)')
        iscurrent = (name == currentbranch)
        if iscurrent:
            label = 'branches.current'

        fm.startitem()
        fm.write('branch', '%s', name, label=label)
        tiprev = tipctx.rev()
        # pad so the "rev:node" column lines up across rows
        padsize = max(31 - len(str(tiprev)) - encoding.colwidth(name), 0)
        fm.condwrite(not ui.quiet, 'rev node', ' ' * padsize + ' %d:%s',
                     tiprev, hexfunc(tipctx.node()),
                     label='log.changeset changeset.%s' % tipctx.phasestr())
        fm.data(active=isactive, closed=not isopen, current=iscurrent)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain('\n')
    fm.end()
1187 1187
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    revs = None
    if 'rev' in opts:
        revs = scmutil.revrange(repo, opts['rev'])

    # Map the user-visible compression name to the internal bundle type
    # identifier used by changegroup.writebundle().
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    # --all is shorthand for "--base null": treat every changeset as missing
    # from the destination.
    if opts.get('all'):
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    # TODO: get desired bundlecaps from command line.
    bundlecaps = None
    if base:
        # Explicit --base: the caller asserts what the destination already
        # has, so no remote peer is contacted at all.
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        common = [repo.lookup(rev) for rev in base]
        # Python 2 map() returns a list here; when revs is empty/None,
        # heads stays falsy and all heads are bundled.
        heads = revs and map(repo.lookup, revs) or revs
        cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
                                        common=common, bundlecaps=bundlecaps)
        outgoing = None
    else:
        # No --base: contact the destination and discover which changesets
        # it is missing.
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)
        cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
                                             bundlecaps)
    if not cg:
        # Nothing to bundle; report why (possibly excluded secret csets).
        scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
        return 1

    changegroup.writebundle(ui, cg, fname, bundletype)
1271 1271
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository

    Returns 0 on success.
    """
    # Resolve the requested (or working-directory parent) changeset, then
    # build a matcher over the named files/patterns within it.
    rev = opts.get('rev')
    ctx = scmutil.revsingle(repo, rev)
    matcher = scmutil.match(ctx, (file1,) + pats, opts)

    return cmdutil.cat(ui, repo, ctx, matcher, '', **opts)
1305 1305
@command('^clone',
    [('U', 'noupdate', None, _('the clone will include an empty working '
                               'directory (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories. Note that specifying a
    tag will include the tagged changeset but not the changeset
    containing the tag.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone http://selenic.com/hg

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone http://selenic.com/hg#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    # The two update-control options are mutually exclusive.
    noupdate = opts.get('noupdate')
    updaterev = opts.get('updaterev')
    if noupdate and updaterev:
        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=updaterev or not noupdate,
                      branch=opts.get('branch'))

    # hg.clone() returns None on failure; map that to exit status 1.
    return result is None
1424 1424
@command('^commit|ci',
    [('A', 'addremove', None,
      _('mark new/missing files as added/removed before committing')),
     ('', 'close-branch', None,
      _('mark a branch head as closed')),
     ('', 'amend', None, _('amend the parent of the working directory')),
     ('s', 'secret', None, _('use the secret phase for committing')),
     ('e', 'edit', None, _('invoke editor on commit messages')),
     ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.
    """
    # --interactive delegates to the record machinery, which re-enters this
    # function (without the flag) once hunks have been selected.
    if opts.get('interactive'):
        opts.pop('interactive')
        cmdutil.dorecord(ui, repo, commit, 'commit', False,
                        cmdutil.recordfilter, *pats, **opts)
        return

    if opts.get('subrepos'):
        if opts.get('amend'):
            raise util.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    # Refuse to commit in the middle of e.g. a graft/rebase/histedit.
    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        # Only an actual head of the branch may be closed; when amending,
        # at least one parent of the commit being amended must be on it.
        if not bheads:
            raise util.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo.parents()[0].p1().branch() != branch and \
                    repo.parents()[0].p2().branch() != branch:
                raise util.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        # Amending is only allowed on a mutable, childless, non-merge parent
        # (children allowed only when unstable changesets are enabled).
        old = repo['.']
        if not old.mutable():
            raise util.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot amend while merging'))
        allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
        if not allowunstable and old.children():
            raise util.Abort(_('cannot amend changeset with children'))

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        active = repo._activebookmark
        marks = old.bookmarks()
        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
        elif marks:
            # Move any bookmarks from the amended changeset onto its
            # replacement, re-activating the previously active one.
            ui.debug('moving bookmarks %r from %s to %s\n' %
                     (marks, old.hex(), hex(node)))
            newmarks = repo._bookmarks
            for bm in marks:
                newmarks[bm] = node
                if bm == active:
                    bookmarks.activate(repo, bm)
            newmarks.write()
    else:
        # Normal (non-amend) commit path. The phase of the new commit may be
        # forced to secret; the config is backed up and restored so --secret
        # has no lasting effect (and is propagated to subrepos via baseui).
        def commitfunc(ui, repo, message, match, opts):
            backup = ui.backupconfig('phases', 'new-commit')
            baseui = repo.baseui
            basebackup = baseui.backupconfig('phases', 'new-commit')
            try:
                if opts.get('secret'):
                    ui.setconfig('phases', 'new-commit', 'secret', 'commit')
                    # Propagate to subrepos
                    baseui.setconfig('phases', 'new-commit', 'secret', 'commit')

                editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
                editor = cmdutil.getcommiteditor(editform=editform, **opts)
                return repo.commit(message, opts.get('user'), opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)
            finally:
                ui.restoreconfig(backup)
                repo.baseui.restoreconfig(basebackup)


        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # Nothing was committed; distinguish "missing files" from a
            # genuinely clean working directory in the message.
            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1575 1575
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))],
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    # Editing mode: pick the config file for the requested scope, create it
    # from a sample if none exists yet, and hand it to the user's editor.
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise util.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise util.Abort(_("can't use --local outside a repository"))
            paths = [repo.join('hgrc')]
        elif opts.get('global'):
            paths = scmutil.systemrcpath()
        else:
            paths = scmutil.userrcpath()

        # for/else: the else runs only if no candidate path exists, in which
        # case the first path is seeded with a scope-appropriate sample.
        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            fp = open(f, "w")
            fp.write(samplehgrc)
            fp.close()

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=util.Abort, errprefix=_("edit failed"))
        return

    # Display mode: with --debug, first show which files were consulted.
    for f in scmutil.rcpath():
        ui.debug('read config from: %s\n' % f)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # Arguments are either bare section names or section.name items;
        # at most one fully-qualified item may be requested.
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise util.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        value = str(value).replace('\n', '\\n')
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # Whole-section match: print "section.name=value".
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                    matched = True
                elif v == sectname:
                    # Exact item match: print the bare value.
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
                    matched = True
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
            matched = True
    if matched:
        return 0
    return 1
1673 1673
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # Copies are recorded in the dirstate, so hold the working-copy lock
    # for the duration of the operation.
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        lock.release()
1700 1700
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) not in (2, 3):
        raise util.Abort(_('either two or three arguments required'))
    if len(args) == 3:
        # Explicit index file given: open it directly, no repo required.
        index, rev1, rev2 = args
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        # Two arguments: use the changelog of the current repository.
        if not repo:
            raise util.Abort(_("there is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
1719 1719
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Only an empty repository may be (re)built from a DAG description.
    cl = repo.changelog
    if len(cl) > 0:
        raise util.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    # All commits happen inside a single lock + transaction so a failure
    # rolls the whole build back.
    lock = tr = None
    try:
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' event: create one changeset; ps are the parent ids.
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    # Maintain a shared file "mf" whose per-rev edits can be
                    # three-way merged at merge nodes.
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # A single file "of" rewritten wholesale at every rev.
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # A fresh "nf<id>" file per rev; merges also carry over
                    # the second parent's nf* files.
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' event: record a local tag for changeset id.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a' event: switch the named branch for subsequent nodes.
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock)
1870 1870
@command('debugbundle',
        [('a', 'all', None, _('show all details'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = hg.openpath(ui, bundlepath)
    try:
        gen = exchange.readbundle(ui, f, bundlepath)
        # Bundle2 files have their own part-based walker.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        if all:
            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))

            # Walk one chunk group (changelog, manifest, or a filelog),
            # printing the full delta metadata per chunk. The delta chain
            # is threaded through 'chain'.
            def showchunks(named):
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            # NOTE(review): this bundle2 check looks unreachable — bundle2
            # streams already returned via _debugbundle2 above.
            if isinstance(gen, bundle2.unbundle20):
                raise util.Abort(_('use debugbundle2 for this file'))
            # Default (non --all) output: just the changelog node hashes.
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()
1927 1927
def _debugbundle2(ui, gen, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise util.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    # Print every part's type and parameters; for changegroup parts also
    # list the changelog nodes they carry.
    # NOTE(review): the 'all' flag in **opts is currently ignored here.
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.packermap[version][1](part, 'UN')
            chunkdata = cg.changelogheader()
            # Walk the changelog delta chunks; the chain threads each
            # chunk's node into the next deltachunk() call.
            chain = None
            while True:
                chunkdata = cg.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write(" %s\n" % hex(node))
                chain = node
1947 1947
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Cross-check every dirstate entry against the manifests of both
    # working-directory parents, warning about each inconsistency found.
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0

    def complain(msg):
        ui.warn(msg)
        return 1

    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            errors += complain(_("%s in state %s, but not in manifest1\n")
                               % (f, state))
        if state in "a" and f in m1:
            errors += complain(_("%s in state %s, but also in manifest1\n")
                               % (f, state))
        if state in "m" and f not in m1 and f not in m2:
            errors += complain(
                _("%s in state %s, but not in either manifest\n") % (f, state))
    # reverse direction: every file in manifest1 must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            errors += complain(_("%s in manifest1, but listed as state %s")
                               % (f, state))
    if errors:
        raise util.Abort(
            _(".hg/dirstate inconsistent with current parent's manifest"))
1975 1975
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # one line per command: canonical name followed by its long options
    for name, entry in sorted(table.iteritems()):
        canonical = name.split('|')[0].strip('^')
        longopts = ', '.join(opt[1] for opt in entry[1])
        ui.write('%s: %s\n' % (canonical, longopts))
1983 1983
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if not opts.get('options'):
        # complete command names matching the given prefix
        cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
        if ui.verbose:
            cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
        ui.write("%s\n" % "\n".join(sorted(cmdlist)))
        return

    # complete option names: global options plus, if a command is named,
    # that command's own options (deprecated ones excluded)
    options = []
    otables = [globalopts]
    if cmd:
        aliases, entry = cmdutil.findcmd(cmd, table, False)
        otables.append(entry[1])
    for t in otables:
        for o in t:
            if "(DEPRECATED)" in o[3]:
                continue
            if o[0]:
                options.append('-%s' % o[0])
            options.append('--%s' % o[1])
    ui.write("%s\n" % "\n".join(options))
2011 2011
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog file: emit its DAG, labeling any listed revs as rN
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield ('n', (rev, parents)) nodes and ('l', (rev, label)) labels
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no file given: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its list of tag names for labeling
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an annotation event whenever the branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    # serialize the event stream through the dagparser text formatter
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
2073 2073
@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('', 'dir', False, _('open directory manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # with -c/-m the revlog is implied, so the first positional argument
    # is really the revision
    if opts.get('changelog') or opts.get('manifest'):
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        # an unknown identifier surfaces as KeyError; report it cleanly
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
2090 2090
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # parse into the internal (unixtime, tz-offset) pair, optionally
    # allowing the extended set of input formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        # also report whether the parsed date falls inside RANGE
        matcher = util.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
2106 2106
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # run one discovery round and report common heads / subset relations
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # reduce the common set to just the heads of its ancestry
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # NOTE(review): 'serverlog' is read from opts but not declared in the
    # option table above -- presumably set elsewhere; confirm.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in semicolon-separated log lines
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
2176 2176
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # show the parsed tree before evaluating it
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for fname in ctx.getfileset(expr):
        ui.write("%s\n" % fname)
2189 2189
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # the probe file must exist for the case-sensitivity check below
    util.writefile('.debugfsinfo', '')
    def yesno(flag):
        return flag and 'yes' or 'no'
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    ui.write(('case-sensitive: %s\n') % yesno(util.checkcase('.debugfsinfo')))
    os.unlink('.debugfsinfo')
2200 2200
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # map the user-facing compression name to the on-disk bundle type tag
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(ui, bundle, bundlepath, bundletype)
2234 2234
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    # the dirstate's ignore matcher carries the combined regex (if any
    # patterns are configured) as its 'includepat' attribute
    matcher = repo.dirstate._ignore
    pattern = getattr(matcher, 'includepat', None)
    if pattern is None:
        raise util.Abort(_("no ignore patterns found"))
    ui.write("%s\n" % pattern)
2244 2244
@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('', 'dir', False, _('open directory manifest')),
     ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    # with generaldelta the base column shows the delta parent rather
    # than the start of the delta chain
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # full hashes in debug mode, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " %s\n" % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # corrupt entries still get a line, with null parents
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2303 2303
@command('debugindexdot', [], _('FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    # prefer the repository's own filelog; fall back to opening the
    # named revlog directly from the current directory
    rlog = None
    if repo:
        flog = repo.file(file_)
        if len(flog):
            rlog = flog
    if rlog is None:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write(("digraph G {\n"))
    for i in rlog:
        p1, p2 = rlog.parents(rlog.node(i))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), i))
        # second parent only exists for merges
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), i))
    ui.write("}\n")
2322 2322
@command('debuginstall', [], '', norepo=True)
def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # each failed probe below increments this counter; it doubles as the
    # command's exit status
    problems = 0

    # encoding
    ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # Python
    ui.status(_("checking Python executable (%s)\n") % sys.executable)
    ui.status(_("checking Python version (%s)\n")
              % ("%s.%s.%s" % sys.version_info[:3]))
    ui.status(_("checking Python lib (%s)...\n")
              % os.path.dirname(os.__file__))

    # compiled modules
    ui.status(_("checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    import templater
    p = templater.templatepaths()
    ui.status(_("checking templates (%s)...\n") % ' '.join(p))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            try:
                templater.templater(m)
            except Exception, inst:
                ui.write(" %s\n" % inst)
                p = None
        else:
            ui.write(_(" template 'default' not found\n"))
            p = None
    else:
        ui.write(_(" no template directories found\n"))
    if not p:
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("checking commit editor...\n"))
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # only the executable part of the editor command must be on PATH
    cmdpath = util.findexe(shlex.split(editor)[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
            problems += 1

    # check username
    ui.status(_("checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("no problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
2421 2421
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise util.Abort("known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # one digit per queried node, in the order given
    bits = ["1" if known else "0" for known in peer.known(nodes)]
    ui.write("%s\n" % "".join(bits))
2434 2434
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias so existing completion scripts keep working
    debugnamecomplete(ui, repo, *args)
2439 2439
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branches are excluded here and added separately below so that only
    # open branches are listed (matching historical behavior)
    for nsname, ns in repo.names.iteritems():
        if nsname == 'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
2459 2459
2460 2460 @command('debuglocks',
2461 2461 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2462 2462 ('W', 'force-wlock', None,
2463 2463 _('free the working state lock (DANGEROUS)'))],
2464 2464 _('[OPTION]...'))
2465 2465 def debuglocks(ui, repo, **opts):
2466 2466 """show or modify state of locks
2467 2467
2468 2468 By default, this command will show which locks are held. This
2469 2469 includes the user and process holding the lock, the amount of time
2470 2470 the lock has been held, and the machine name where the process is
2471 2471 running if it's not local.
2472 2472
2473 2473 Locks protect the integrity of Mercurial's data, so should be
2474 2474 treated with care. System crashes or other interruptions may cause
2475 2475 locks to not be properly released, though Mercurial will usually
2476 2476 detect and remove such stale locks automatically.
2477 2477
2478 2478 However, detecting stale locks may not always be possible (for
2479 2479 instance, on a shared filesystem). Removing locks may also be
2480 2480 blocked by filesystem permissions.
2481 2481
2482 2482 Returns 0 if no locks are held.
2483 2483
2484 2484 """
2485 2485
2486 2486 if opts.get('force_lock'):
2487 2487 repo.svfs.unlink('lock')
2488 2488 if opts.get('force_wlock'):
2489 2489 repo.vfs.unlink('wlock')
2490 2490 if opts.get('force_lock') or opts.get('force_lock'):
2491 2491 return 0
2492 2492
2493 2493 now = time.time()
2494 2494 held = 0
2495 2495
2496 2496 def report(vfs, name, method):
2497 2497 # this causes stale locks to get reaped for more accurate reporting
2498 2498 try:
2499 2499 l = method(False)
2500 2500 except error.LockHeld:
2501 2501 l = None
2502 2502
2503 2503 if l:
2504 2504 l.release()
2505 2505 else:
2506 2506 try:
2507 2507 stat = vfs.lstat(name)
2508 2508 age = now - stat.st_mtime
2509 2509 user = util.username(stat.st_uid)
2510 2510 locker = vfs.readlock(name)
2511 2511 if ":" in locker:
2512 2512 host, pid = locker.split(':')
2513 2513 if host == socket.gethostname():
2514 2514 locker = 'user %s, process %s' % (user, pid)
2515 2515 else:
2516 2516 locker = 'user %s, process %s, host %s' \
2517 2517 % (user, pid, host)
2518 2518 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2519 2519 return 1
2520 2520 except OSError, e:
2521 2521 if e.errno != errno.ENOENT:
2522 2522 raise
2523 2523
2524 2524 ui.write("%-6s free\n" % (name + ":"))
2525 2525 return 0
2526 2526
2527 2527 held += report(repo.svfs, "lock", repo.lock)
2528 2528 held += report(repo.vfs, "wlock", repo.wlock)
2529 2529
2530 2530 return held
2531 2531
@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
     ('', 'record-parents', False,
      _('record parent information for the precursor')),
     ('r', 'rev', [], _('display markers relevant to REV')),
    ] + commitopts2,
    _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise util.Abort('changeset references must be full hexadecimal '
                             'node identifiers')

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise util.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        # marker creation needs the store lock and its own transaction
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parent recording only works for locally-known precursors
                    if prec not in repo.unfiltered():
                        raise util.Abort('cannot used --record-parents on '
                                         'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata)
                tr.close()
            except ValueError, exc:
                raise util.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        for m in markers:
            cmdutil.showmarker(ui, m)
2601 2601
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # return (files, dirs) completing 'path' among dirstate entries
        # whose state character is in 'acceptable'
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        # reject specs that escape the repository root
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; convert for comparison on Windows
        fixpaths = os.sep != '/'
        if fixpaths:
            spec = spec.replace(os.sep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', os.sep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(os.sep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the flags;
    # no flags means accept everything ('nmar' below)
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
2666 2666
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                   v.encode('string-escape')))
        return
    # update mode: compare-and-swap old -> new
    key, old, new = keyinfo
    r = target.pushkey(namespace, key, old, new)
    ui.status(str(r) + '\n')
    return not r
2687 2687
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (ancestry vectors) of two revisions and print
    # their relation: '=' equal, '>'/'<' ancestor relationship, '|'
    # unrelated, plus depth/hamming-distance diagnostics.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): 'rel' stays unbound if none of the branches above
    # fire; presumably pvec guarantees the four cases are exhaustive
    # (| meaning "unrelated" covers the remainder) -- confirm.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2708 2708
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    # rewriting the dirstate requires the working-directory lock
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
    finally:
        wlock.release()
2730 2730
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # delegates entirely to the repair module
    repair.rebuildfncache(ui, repo)
2735 2735
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # resolve the revision (working parent when none given) and walk the
    # matched files, reporting each one's copy/rename source if any
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if not o:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2752 2752
@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('', 'dir', False, _('open directory manifest')),
     ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump: raw one-line-per-revision index dump, then exit.
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n")
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            # Revisions stored as full snapshots report themselves as
            # their own delta base.
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Running head set: a parent of the current revision stops
            # being a head; the current revision becomes one.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads), clen))
        return 0

    # Decode the revlog version word: low 16 bits are the format number,
    # the high bits carry feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for the statistics pass below.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # Each size accumulator is [min, max, total]; totals start as longs
    # so they cannot overflow on Python 2.
    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot: starts a new delta chain.
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            # Classify the delta base: previous revision (and whether
            # that also happens to be a parent), p1, p2, or other.
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # NOTE: these in-place divisions rely on Python 2 integer floor
    # division; the totals are saved first so the averages can be shown
    # alongside them.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = totalrawsize / totalsize

    # Format-string templates sized to the widest value they will print.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the templates above.
        return (value, 100 * float(value) / total)

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
2932 2932
@command('debugrevspec',
    [('', 'optimize', None, _('print parsed tree after optimizing'))],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after aliases
    expansion.
    """
    if ui.verbose:
        # Walk the expression through each transformation stage, printing
        # a stage's tree only when that stage actually changed it.
        tree = revset.parse(expr)
        ui.note(revset.prettyformat(tree), "\n")
        aliastree = revset.findaliases(ui, tree)
        if aliastree != tree:
            ui.note(revset.prettyformat(aliastree), "\n")
            tree = aliastree
        foldedtree = revset.foldconcat(tree)
        if foldedtree != tree:
            ui.note(revset.prettyformat(foldedtree), "\n")
        if opts["optimize"]:
            weight, optimizedtree = revset.optimize(foldedtree, True)
            ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
    matcher = revset.match(ui, expr)
    revs = matcher(repo)
    if ui.verbose:
        ui.note("* set:\n", revset.prettyformatset(revs), "\n")
    for rev in revs:
        ui.write("%s\n" % rev)
2961 2961
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both parents first; an omitted second revision means the
    # null revision, i.e. a single-parent working directory.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        # Bracket the parent change so the dirstate can track it.
        repo.dirstate.beginparentchange()
        repo.setparents(parent1, parent2)
        repo.dirstate.endparentchange()
    finally:
        wlock.release()
2984 2984
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # ent is (state, mode, size, mtime); -1 means the mtime is unknown.
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        # Modern 0o... octal literals (the extraction had left both the
        # old 0... and new 0o... diff lines in place, which is invalid):
        # test the symlink type bit, otherwise show the permission bits
        # with the process umask applied.
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
3011 3011
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state recorded in the requested changeset,
    # one (path, source, revision) record per subrepo, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, (source, revision) in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % source)
        ui.write((' revision %s\n') % revision)
3022 3022
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # In debug mode print full 40-digit hashes, otherwise short forms.
    if ui.debug():
        ctx2str = lambda ctx: ctx.hex()
        node2str = hex
    else:
        ctx2str = str
        node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(' '.join(node2str(node) for node in succsset))
            ui.write('\n')
3076 3076
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, opts)
    matched = list(repo.walk(matcher))
    if not matched:
        return
    # Honor ui.slash on platforms with a different path separator;
    # this affects display only.
    if ui.configbool('ui', 'slash') and os.sep != '/':
        display = lambda fn: util.normpath(fn)
    else:
        display = lambda fn: fn
    # Size the columns to the longest absolute and relative names.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(abs) for abs in matched),
        max(len(matcher.rel(abs)) for abs in matched))
    for abs in matched:
        exact = 'exact' if matcher.exact(abs) else ''
        line = fmt % (abs, display(matcher.rel(abs)), exact)
        ui.write("%s\n" % line.rstrip())
3093 3093
@command('debugwireargs',
    [('', 'three', '', 'three'),
    ('', 'four', '', 'four'),
    ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise wire-protocol argument passing against a peer repository.
    peer = hg.peer(ui, opts, repopath)
    # Drop the generic remote options; only the test-specific ones that
    # were actually set get forwarded.
    for opt in remoteopts:
        del opts[opt[1]]
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
3115 3115
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353 # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --change diffs one revision against its first parent and is
    # mutually exclusive with --rev.
    if revs and change:
        raise util.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    matcher = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, patch.diffallopts(ui, opts),
                           node1, node2, matcher, stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
3202 3202
@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # Positional arguments and --rev values are treated identically;
    # default to the working directory parent when neither was given.
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise util.Abort(_("export requires at least one changeset"))
    if len(revs) == 1:
        ui.note(_('exporting patch:\n'))
    else:
        ui.note(_('exporting patches:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
3283 3283
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [PATTERN]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision whose names match the given patterns (excluding
    removed files).

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # With -0 each name is NUL-terminated instead of newline-terminated.
    end = '\0' if opts.get('print0') else '\n'
    fm = ui.formatter('files', opts)

    matcher = scmutil.match(ctx, pats, opts)
    ret = cmdutil.files(ui, ctx, matcher, fm, '%s' + end,
                        opts.get('subrepos'))

    fm.end()

    return ret
3347 3347
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    matcher = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, matcher, prefix="",
                              explicitonly=False)[0]
    # Exit status 1 when any requested file could not be forgotten.
    if rejected:
        return 1
    return 0
3382 3382
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r] REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on successful completion.
    '''

    # Positional arguments and --rev values are merged into one list.
    revs = list(revs)
    revs.extend(opts['rev'])

    # -U/-D override with the invoking user / current time unless an
    # explicit --user/--date was also given.
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft', **opts)

    cont = False
    if opts['continue']:
        cont = True
        if revs:
            raise util.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError, inst:
            # Only a missing graftstate file means "nothing to continue";
            # any other I/O error propagates.
            if inst.errno != errno.ENOENT:
                raise
            raise util.Abort(_("no graft state found, can't continue"))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise util.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    # Merge changesets cannot be grafted; drop them with a warning.
    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # Cannot use x.remove(y) on smart set, this has to be a list.
        # XXX make this lazy in the future
        revs = list(revs)
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        # ids maps both the hex of each graft candidate and its recorded
        # 'source' extra (if any) to the candidate's revision number.
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        for rev in repo.changelog.findmissingrevs(revs, [crev]):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    # The recorded origin is unknown in this repository.
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    wlock = repo.wlock()
    try:
        for pos, ctx in enumerate(repo.set("%ld", revs)):
            # Human-readable description: rev:hash "summary" (names).
            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                                   ctx.description().split('\n', 1)[0])
            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
            if names:
                desc += ' (%s)' % ' '.join(names)
            ui.status(_('grafting %s\n') % desc)
            if opts.get('dry_run'):
                continue

            # Record provenance in the commit extras: keep the original
            # source when grafting a graft, plus the intermediate hash.
            source = ctx.extra().get('source')
            extra = {}
            if source:
                extra['source'] = source
                extra['intermediate-source'] = ctx.hex()
            else:
                extra['source'] = ctx.hex()
            user = ctx.user()
            if opts.get('user'):
                user = opts['user']
            date = ctx.date()
            if opts.get('date'):
                date = opts['date']
            message = ctx.description()
            if opts.get('log'):
                message += '\n(grafted from %s)' % ctx.hex()

            # we don't merge the first commit when continuing
            if not cont:
                # perform the graft merge with p1(rev) as 'ancestor'
                try:
                    # ui.forcemerge is an internal variable, do not document
                    repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                      'graft')
                    stats = mergemod.graft(repo, ctx, ctx.p1(),
                                           ['local', 'graft'])
                finally:
                    repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
                # report any conflicts
                if stats and stats[3] > 0:
                    # write out state for --continue
                    nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                    repo.vfs.write('graftstate', ''.join(nodelines))
                    raise util.Abort(
                        _("unresolved conflicts, can't continue"),
                        hint=_('use hg resolve and hg graft --continue'))
            else:
                cont = False

            # commit
            node = repo.commit(text=message, user=user,
                               date=date, extra=extra, editor=editor)
            if node is None:
                ui.warn(
                    _('note: graft of %d:%s created no changes to commit\n') %
                    (ctx.rev(), ctx))
    finally:
        wlock.release()

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        util.unlinkpath(repo.join('graftstate'), ignoremissing=True)

    return 0
3625 3625
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    # Multiline matching so ^/$ anchor per line; -i adds case folding.
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    # Field separator and record terminator; -0 makes both NUL.
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # Cache filelog objects; the same file is read for many revisions.
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        """Yield (linenum, colstart, colend, linetext) for each match.

        Column offsets are relative to the start of the matched line.
        """
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            # rfind returns -1 when there is no earlier newline; -1 + 1
            # is falsy, so the 'or' falls back to the scan start.
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        """One matched line in one file revision.

        Equality compares line text only, while the hash also mixes in
        the line number.
        NOTE(review): that makes "equal" states hash differently, which
        breaks the usual __eq__/__hash__ contract -- confirm nothing
        relies on hashing these.
        """
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def __iter__(self):
            # Yield (text, label) fragments so every regexp hit on the
            # line gets the 'grep.match' color label.
            yield (self.line[:self.colstart], '')
            yield (self.line[self.colstart:self.colend], 'grep.match')
            rest = self.line[self.colend:]
            while rest != '':
                match = regexp.search(rest)
                if not match:
                    yield (rest, '')
                    break
                mstart, mend = match.span()
                yield (rest[:mstart], '')
                yield (rest[mstart:mend], 'grep.match')
                rest = rest[mend:]

    # matches[rev][fn] -> list of linestate for that file revision.
    matches = {}
    # copies[rev][fn] -> copy/rename source of fn in rev (with --follow).
    copies = {}
    def grepbody(fn, rev, body):
        # Record every matching line of fn@rev in the matches dict.
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # Diff two linestate lists, yielding ('-', state) for removed
        # matches and ('+', state) for added ones (used by --all).
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        """Print the matches for one file at one revision.

        Returns True if at least one record was written.
        """
        rev = ctx.rev()
        if ui.quiet:
            datefunc = util.shortdate
        else:
            datefunc = util.datestr
        found = False
        @util.cachefunc
        def binary():
            # Lazily decide whether the file content is binary; cached
            # so the filelog is read at most once per display call.
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            # --all: show +/- transitions against the parent's matches.
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            # Build the optional output columns in fixed order:
            # file:rev[:line][:change][:user][:date].
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if not opts.get('files_with_matches'):
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(" Binary file matches")
                else:
                    for s, label in l:
                        ui.write(s, label=label)
            ui.write(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one record per file is enough.
                break
        return found

    # Files already reported; without --all a file is skipped once its
    # first matching revision has been printed.
    skip = {}
    # revfiles[rev] -> files of interest touched in that revision.
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # walkchangerevs 'prep' hook: pre-compute match state for ctx
        # and its first parent so the main loop can diff them.
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                # File not present in this revision.
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                # Propagate the skip through renames as well.
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # Also grep the parent's version (under its old name if the
            # file was copied) so display() can compute transitions.
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # First hit shown; suppress older revisions of fn.
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # Free per-revision state once the revision has been emitted.
        del matches[rev]
        del revfiles[rev]

    return not found
3848 3848
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    # Restrict to descendants of -r STARTREV when given.
    startnode = None
    if 'rev' in opts:
        startnode = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        # Topological heads: ignore named branches entirely.
        headctxs = [repo[n] for n in repo.heads(startnode)]
    else:
        # Collect the branch heads of every named branch.
        nodes = []
        for branch in repo.branchmap():
            nodes += repo.branchheads(branch, startnode, opts.get('closed'))
        headctxs = [repo[n] for n in nodes]

    wanted = None
    if branchrevs:
        # Keep only heads on the branches of the given revisions.
        wanted = set(repo[br].branch() for br in branchrevs)
        headctxs = [c for c in headctxs if c.branch() in wanted]

    if opts.get('active') and branchrevs:
        # DEPRECATED --active: intersect with the DAG heads.
        dagheads = repo.heads(startnode)
        headctxs = [c for c in headctxs if c.node() in dagheads]

    if branchrevs:
        # Warn about requested branches that yielded no open heads.
        present = set(c.branch() for c in headctxs)
        missing = wanted - present
        if missing:
            headless = ', '.join(b for b in missing)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not headctxs:
        return 1

    # Display newest first.
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in sorted(headctxs, key=lambda c: -c.rev()):
        displayer.show(ctx)
    displayer.close()
3919 3919
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
    ('c', 'command', None, _('show only help for commands')),
    ('k', 'keyword', '', _('show topics matching keyword')),
    ],
    _('[-ec] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    textwidth = min(ui.termwidth(), 80) - 2

    # Keywords selecting which conditional help sections survive
    # minirst formatting.
    keep = []
    if ui.verbose:
        keep.append('verbose')
    platform = sys.platform
    if platform.startswith('win'):
        keep.append('windows')
    elif platform == 'OpenVMS':
        keep.append('vms')
    elif platform == 'plan9':
        keep.append('plan9')
    else:
        keep.append('unix')
        keep.append(platform.lower())

    # "topic.section" addresses a single section within a topic.
    section = None
    if name and '.' in name:
        name, section = name.split('.', 1)

    text = help.help_(ui, name, **opts)

    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    if section and not formatted:
        raise util.Abort(_("help section not found"))

    # Reformat with a marker telling whether verbose-only material was
    # pruned, so the output can mention (or not) the omitted parts.
    keep.append('omitted' if 'verbose' in pruned else 'notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep,
                                       section=section)
    ui.write(formatted)
3971 3971
3972 3972
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    # Full 40-char hashes under --debug, short 12-char ones otherwise.
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    # With no selector flags at all, print the default summary.
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        # peer.local() is the repo object for local paths/bundles, or
        # None for a truly remote peer.
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    if not repo:
        # Remote peer: only the hash and bookmarks can be queried.
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # Remote bookmarks pointing at remoterev, sorted by name.
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            # No -r: describe the working directory parent(s); '+'
            # flags uncommitted (or dirty-subrepo) changes.
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                if (any(repo.status())
                    or any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            # Summary mode: branch (when not default), tags, bookmarks.
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            # Explicit selector flags: emit exactly what was asked for.
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
4113 4113
@command('import|patch',
    [('p', 'strip', 1,
     _('directory strip option for patch. This has the same '
       'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None,
     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'partial', None,
     _('commit even if some hunks fail')),
    ('', 'exact', None,
     _('apply patch to the nodes from which it was generated')),
    ('', 'prefix', '',
     _('apply patch to subdirectory'), _('DIR')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    Because import first applies changes to the working directory,
    import will abort if there are outstanding changes.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    Use --bypass to apply and commit patches directly to the
    repository, not touching the working directory. Without --exact,
    patches will be applied on top of the working directory parent
    revision.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...). Note that when none of the hunk applies
    cleanly, :hg:`import --partial` will create an empty changeset,
    importing only the patch metadata.

    It is possible to use external patch programs to perform the patch
    by setting the ``ui.patch`` configuration option. For the default
    internal tool, the fuzz can also be configured via ``patch.fuzz``.
    See :hg:`help config` for more information about configuration
    files and how to use these options.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

      - use an external tool to apply a patch which is too fuzzy for
        the default internal tool.

          hg import --config ui.patch="patch --merge" fuzzy.patch

      - change the default fuzzing from 2 to a less strict 7

          hg import --config ui.fuzz=7 fuzz.patch

    Returns 0 on success, 1 on partial success (see --partial).
    """

    if not patch1:
        raise util.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # 'update' means the working directory is touched (no --bypass).
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise util.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise util.Abort(_('cannot use --similarity with --bypass'))
    if opts.get('exact') and opts.get('edit'):
        raise util.Abort(_('cannot use --exact with --edit'))
    if opts.get('exact') and opts.get('prefix'):
        raise util.Abort(_('cannot use --exact with --prefix'))

    if update:
        cmdutil.checkunfinished(repo)
    if (opts.get('exact') or not opts.get('force')) and update:
        cmdutil.bailifchanged(repo)

    # DEPRECATED -b/--base: directory prepended to each patch name.
    base = opts["base"]
    # All four are released/rolled back in the outer finally below.
    wlock = dsguard = lock = tr = None
    msgs = []
    ret = 0


    try:
        try:
            wlock = repo.wlock()
            dsguard = cmdutil.dirstateguard(repo, 'import')
            # Only committing imports need the store lock and a
            # transaction; --no-commit works on the dirstate alone.
            if not opts.get('no_commit'):
                lock = repo.lock()
                tr = repo.transaction('import')
            parents = repo.parents()
            for patchurl in patches:
                if patchurl == '-':
                    ui.status(_('applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = 'stdin'      # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_('applying %s\n') % patchurl)
                    patchfile = hg.openpath(ui, patchurl)

                haspatch = False
                # A single patch file may contain several patches
                # (e.g. an mbox); import each one in order.
                for hunk in patch.split(patchfile):
                    (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
                                                            parents, opts,
                                                            msgs, hg.clean)
                    if msg:
                        haspatch = True
                        ui.note(msg + '\n')
                    # Track the parent(s) for the next patch: the new
                    # dirstate parents, or (with --bypass and not
                    # --exact) the changeset just created.
                    if update or opts.get('exact'):
                        parents = repo.parents()
                    else:
                        parents = [repo[node]]
                    if rej:
                        # --partial left .rej files; report and stop.
                        ui.write_err(_("patch applied partially\n"))
                        ui.write_err(_("(fix the .rej files and run "
                                       "`hg commit --amend`)\n"))
                        ret = 1
                        break

                if not haspatch:
                    raise util.Abort(_('%s: no diffs found') % patchurl)

            if tr:
                tr.close()
            if msgs:
                repo.savecommitmessage('\n* * *\n'.join(msgs))
            # Commit the dirstate changes; an exception before this
            # point makes the guard restore the original dirstate.
            dsguard.close()
            return ret
        finally:
            # TODO: get rid of this meaningless try/finally enclosing.
            # this is kept only to reduce changes in a patch.
            pass
    finally:
        # tr.release() aborts the transaction unless tr.close() ran.
        if tr:
            tr.release()
        release(lock, dsguard, wlock)
4314 4314
@command('incoming|in',
    [('f', 'force', None,
     _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    # Graph mode: render the would-be-pulled changesets as an ASCII DAG.
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)

        def showdag(other, chlist, displayer):
            # Draw the incoming changesets relative to our wdir parents.
            dag = cmdutil.graphrevs(other, chlist, opts)
            wparents = [ctx.node() for ctx in repo[None].parents()]
            cmdutil.displaygraph(ui, dag, displayer, wparents,
                                 graphmod.asciiedges)

        hg._incoming(showdag, lambda: 1, ui, repo, source, opts,
                     buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    # Bookmark comparison mode instead of changeset listing.
    if opts.get('bookmarks'):
        url, branches = hg.parseurl(ui.expandpath(source),
                                    opts.get('branch'))
        other = hg.peer(repo, opts, url)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(url))
        return bookmarks.incoming(ui, repo, other)

    # Regular mode; _subtoppath lets subrepos resolve relative sources.
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
4410 4410
4411 4411
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
         norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Expand path aliases, then let hg.peer create the repository.
    path = ui.expandpath(dest)
    hg.peer(ui, opts, path, create=True)
4428 4428
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    # -0 terminates records with NUL for xargs-safe consumption.
    terminator = '\0' if opts.get('print0') else '\n'
    node = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ctx = repo[node]
    # badfn silences warnings about patterns that match nothing.
    m = scmutil.match(ctx, pats, opts, default='relglob',
                      badfn=lambda x, y: False)

    found = False
    for fn in ctx.matches(m):
        if opts.get('fullpath'):
            ui.write(repo.wjoin(fn), terminator)
        else:
            # With patterns, print paths relative to the cwd.
            ui.write(((pats and m.rel(fn)) or fn), terminator)
        found = True

    return 0 if found else 1
4476 4476
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.

    .. note::

       log -p/--patch may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       for performance reasons, log FILE may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    # --follow combined with explicit revs means "ancestors of those revs":
    # rewrite the revset here and drop the flag so getlogrevs doesn't see it.
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    # --graph output is produced by an entirely separate code path.
    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, *pats, **opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    # When --copies is given, build a rename-lookup function bounded by the
    # highest requested revision (endrev) so copy tracing stops there.
    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        # rev 0 (falsy) is skipped: the null revision has no copy sources.
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        # filematcher (when file patterns were given) narrows the diff/file
        # output to the matched files for this particular revision.
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # flush() reports whether the buffered changeset was actually shown;
        # only shown changesets count against --limit.
        if displayer.flush(rev):
            count += 1

    displayer.close()
4633 4633
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """

    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise util.Abort(_("can't specify a revision with --all"))

        # Enumerate every filelog in the store; each is named
        # "data/<path>.i", so strip that wrapping to recover the path.
        prefix = "data/"
        suffix = ".i"
        paths = []
        lock = repo.lock()
        try:
            for fn, b, size in repo.store.datafiles():
                if (size != 0 and fn.endswith(suffix)
                    and fn.startswith(prefix)):
                    paths.append(fn[len(prefix):-len(suffix)])
        finally:
            lock.release()
        for path in paths:
            fm.startitem()
            fm.write("path", '%s\n', path)
        fm.end()
        return

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # Per-flag display glyph ('l' = symlink, 'x' = executable) and the
    # permission string shown in verbose mode.
    typechar = {'l': '@', 'x': '*', '': ''}
    permstr = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    for path in ctx:
        fm.startitem()
        flags = ctx[path].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[path]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ',
                     permstr[flags], typechar[flags])
        fm.write('path', '%s\n', path)
    fm.end()
4696 4696
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
    ] + mergetoolopts,
    _('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    # The revision can arrive positionally or via -r, but not both.
    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    # No explicit revision and an active bookmark: merge with the other
    # head of that bookmark, but only if there are exactly two.
    if not node and repo._activebookmark:
        bmheads = repo.bookmarkheads(repo._activebookmark)
        curhead = repo[repo._activebookmark].node()
        if len(bmheads) == 2:
            # Pick whichever bookmark head we are not currently on.
            if curhead == bmheads[0]:
                node = bmheads[1]
            else:
                node = bmheads[0]
        elif len(bmheads) > 2:
            raise util.Abort(_("multiple matching bookmarks to merge - "
                               "please merge with an explicit rev or bookmark"),
                             hint=_("run 'hg heads' to see all heads"))
        elif len(bmheads) <= 1:
            raise util.Abort(_("no matching bookmark to merge - "
                               "please merge with an explicit rev or bookmark"),
                             hint=_("run 'hg heads' to see all heads"))

    # No explicit revision and no active bookmark: fall back to picking
    # the single other non-bookmarked head of the current named branch.
    if not node and not repo._activebookmark:
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        # Heads carrying bookmarks are never chosen implicitly.
        nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]

        if len(nbhs) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev")
                             % (branch, len(bheads)),
                             hint=_("run 'hg heads .' to see heads"))

        parent = repo.dirstate.p1()
        if len(nbhs) <= 1:
            # One (or zero) candidate heads: figure out the most helpful
            # error message to emit for why an implicit merge is impossible.
            if len(bheads) > 1:
                raise util.Abort(_("heads are bookmarked - "
                                   "please merge with an explicit rev"),
                                 hint=_("run 'hg heads' to see all heads"))
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev")
                                 % branch,
                                 hint=_("run 'hg heads' to see all heads"))
            msg, hint = _('nothing to merge'), None
            if parent != repo.lookup(branch):
                hint = _("use 'hg update' instead")
            raise util.Abort(msg, hint=hint)

        if parent not in bheads:
            raise util.Abort(_('working directory not at a head revision'),
                             hint=_("use 'hg update' or merge with an "
                                    "explicit revision"))
        # Merge with whichever candidate head we are not already on.
        if parent == nbhs[0]:
            node = nbhs[-1]
        else:
            node = nbhs[0]

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        # Always clear the override so later operations are unaffected.
        ui.setconfig('ui', 'forcemerge', '', 'merge')
4813 4813
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # --graph: render the outgoing changesets as an ASCII DAG and return.
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            # Nothing outgoing; still fire the outgoing hooks.
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        showparents = [ctx.node() for ctx in repo[None].parents()]
        cmdutil.displaygraph(ui, revdag, displayer, showparents,
                             graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    # -B/--bookmarks: compare bookmarks instead of changesets.
    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.outgoing(ui, repo, other)

    # NOTE(review): _subtoppath appears to tell subrepo code which remote
    # to compare against while hg.outgoing runs — confirm with subrepo.py.
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
4890 4890
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if not file_:
        # No file argument: simply report the changeset parents.
        pnodes = [pctx.node() for pctx in ctx.parents()]
    else:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # Collect the file's revision in each parent that actually has it.
        filenodes = []
        for pctx in ctx.parents():
            if not pctx:
                continue
            try:
                filenodes.append(pctx.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # Map each file revision back to the changeset that introduced it.
        pnodes = [repo.filectx(file_, fileid=fnode).node()
                  for fnode in filenodes]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for pnode in pnodes:
        if pnode != nullid:
            displayer.show(repo[pnode])
    displayer.close()
4939 4939
@command('paths', [], _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Aliases are always presented in sorted order.
    pathitems = sorted(ui.paths.iteritems())

    if search:
        for name, path in pathitems:
            if name != search:
                continue
            # Found it; print the (password-scrubbed) location and succeed.
            ui.status("%s\n" % util.hidepassword(path.loc))
            return
        # No alias matched; stay silent under -q, per the documented contract.
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1

    for name, path in pathitems:
        if ui.quiet:
            ui.write("%s\n" % name)
        else:
            ui.write("%s = %s\n" % (name, util.hidepassword(path.loc)))
4984 4984
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if no phases were changed or some could not
    be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    # search for a unique phase argument
    # targetphase stays None when no -p/-d/-s flag was given (display mode).
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise util.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        # Mutating phases: take the repo lock and run inside a transaction.
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise util.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            # Snapshot every revision's phase so we can report what changed.
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                # --force also allows moving the boundary backward.
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # Re-read phases after the transaction to compute the delta.
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        # Without --force, advanceboundary silently skips changesets that
        # would move to a higher phase; detect and report them here.
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
            ret = 1
    return ret
5078 5078
5079 5079 def postincoming(ui, repo, modheads, optupdate, checkout):
5080 5080 if modheads == 0:
5081 5081 return
5082 5082 if optupdate:
5083 5083 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
5084 5084 try:
5085 5085 ret = hg.update(repo, checkout)
5086 5086 except util.Abort, inst:
5087 5087 ui.warn(_("not updating: %s\n") % str(inst))
5088 5088 if inst.hint:
5089 5089 ui.warn(_("(%s)\n") % inst.hint)
5090 5090 return 0
5091 5091 if not ret and not checkout:
5092 5092 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5093 5093 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
5094 5094 return ret
5095 5095 if modheads > 1:
5096 5096 currentbranchheads = len(repo.branchheads())
5097 5097 if currentbranchheads == modheads:
5098 5098 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5099 5099 elif currentbranchheads > 1:
5100 5100 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5101 5101 "merge)\n"))
5102 5102 else:
5103 5103 ui.status(_("(run 'hg heads' to see heads)\n"))
5104 5104 else:
5105 5105 ui.status(_("(run 'hg update' to get a working copy)\n"))
5106 5106
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmark used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                if b not in remotebookmarks:
                    raise util.Abort(_('remote bookmark %s not found!') % b)
                # Pull the changeset the remote bookmark currently points to.
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        # Keep the post-pull checkout target as a node too.
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        # cgresult is the changegroup application result (head count);
        # postincoming uses it to decide what to do/say next.
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult
        if checkout:
            checkout = str(repo.changelog.rev(checkout))
        # NOTE(review): _subtoppath looks like it tells subrepos where to
        # pull from during the update — confirm against subrepo.py.
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

        finally:
            del repo._subtoppath

    finally:
        # Always close the peer connection, even on error.
        other.close()
    return ret
5193 5193
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        # Record the bookmarks being pushed so later exchange code sees them.
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    try:
        other = hg.peer(repo, opts, dest)
    except error.RepoError:
        # Give a clearer error when the implicit default path is missing.
        if dest == "default-push":
            raise util.Abort(_("default repository not configured!"),
                             hint=_('see the "path" section in "hg help config"'))
        else:
            raise

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise util.Abort(_("specified revisions evaluate to an empty set"),
                             hint=_("use different revision arguments"))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                # A subrepo push result of 0 aborts the outer push with
                # exit status 1 (`not 0` is True).
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()))

    # Fold changegroup and bookmark results into the exit status:
    # 0 = pushed changesets, 1 = nothing pushed, 2 = bookmark failure.
    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result
5296 5296
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # Guard clause: nothing to recover means we are done with status 1.
    if not repo.recover():
        return 1
    # Recovery succeeded; a full verify decides the final exit status.
    return hg.verify(repo)
5312 5312
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('remove (and delete) file even if added or modified')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      Note that remove never deletes files in Added [A] state from the
      working directory, not even if option --force is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """
    after = opts.get('after')
    force = opts.get('force')
    # With neither file patterns nor --after there is nothing meaningful
    # to schedule, so bail out early.
    if not (pats or after):
        raise util.Abort(_('no files specified'))

    matcher = scmutil.match(repo[None], pats, opts)
    # All real work (state table above) is delegated to cmdutil.remove.
    return cmdutil.remove(ui, repo, matcher, "", after, force,
                          opts.get('subrepos'))
5364 5364
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # Take the working-directory lock (non-waiting acquisition flag False)
    # around the copy-with-rename operation and always release it.
    lock = repo.wlock(False)
    try:
        # A rename is implemented as a copy that also marks the source
        # for removal.
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        lock.release()
5391 5391
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # Unpack the mutually-constrained mode flags in one pass; 'list' maps to
    # the local name 'show' and 'no_status' to 'nostatus'.
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    # --list cannot be combined with --mark/--unmark, and --mark and
    # --unmark are mutually exclusive.
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified'),
                         hint=('use --all to remerge all files'))

    if show:
        # --list mode: just print the state of each matching file from the
        # merge state and exit; no lock needed since nothing is modified.
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            # Map the state code ('u'/'r') to a ui label for coloring.
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    wlock = repo.wlock()
    try:
        ms = mergemod.mergestate(repo)

        # Resolving only makes sense during a merge: either an active merge
        # state or a second dirstate parent must exist.
        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise util.Abort(
                _('resolve command not applicable when not merging'))

        m = scmutil.match(repo[None], pats, opts)
        ret = 0
        didwork = False  # tracks whether any file matched the patterns

        for f in ms:
            if not m(f):
                continue

            didwork = True

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    # ms.resolve returns non-zero on failure -> exit code 1
                    if ms.resolve(f, wctx):
                        ret = 1
                finally:
                    # Always clear the forcemerge override and persist the
                    # per-file merge-state change, even if resolution raised.
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

        # Persist any mark/unmark changes made outside the per-file commit.
        ms.commit()

        if not didwork and pats:
            ui.warn(_("arguments do not match paths that need resolving\n"))

    finally:
        wlock.release()

    # Nudge users into finishing an unfinished operation
    if not list(ms.unresolved()):
        ui.status(_('(no more unresolved files)\n'))

    return ret
5519 5519
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
            _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    # --date is translated into a --rev; specifying both is ambiguous.
    if opts.get("date"):
        if opts.get("rev"):
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise util.Abort(_('uncommitted merge with no revision specified'),
                         hint=_('use "hg update" or see "hg help revert"'))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    # Without any file selection (patterns, include/exclude, --all, or
    # --interactive) abort with a hint tailored to the repository state.
    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise util.Abort(msg, hint=hint)
        # any(repo.status()) is true when any status category is non-empty,
        # i.e. the working directory has uncommitted changes.
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            # Reverting to a non-parent revision: suggest 'hg update'.
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise util.Abort(msg, hint=hint)

    # Actual revert logic is shared with other commands via cmdutil.
    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5596 5596
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # Pure delegation: the repository object implements the rollback
    # semantics; we only forward the two relevant options.
    dryrun = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dryrun, force=force)
5641 5641
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # Emit the repository root followed by a newline.
    ui.write('%s\n' % repo.root)
5651 5651
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio (ssh protocol) and --cmdserver are mutually exclusive modes.
    if opts["stdio"] and opts["cmdserver"]:
        raise util.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        # ssh server mode: requires a local repository and serves forever
        # over stdin/stdout.
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    if opts["cmdserver"]:
        # command server mode: delegate to the commandserver service.
        service = commandserver.createservice(ui, repo, opts)
        return cmdutil.service(opts, initfn=service.init, runfn=service.run)

    # this way we can check if something was given in the command-line
    if opts.get('port'):
        opts['port'] = util.getport(opts.get('port'))

    # Use the repository's base ui when available so config propagates to
    # the hgweb application; fall back to the plain ui for repo-less serving.
    if repo:
        baseui = repo.baseui
    else:
        baseui = ui
    # Copy the relevant command-line options into the [web] config section.
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for o in optlist.split():
        val = opts.get(o, '')
        if val in (None, ''): # should check against default options instead
            continue
        baseui.setconfig("web", o, val, 'serve')
        if repo and repo.ui != baseui:
            repo.ui.setconfig("web", o, val, 'serve')

    # hgweb can serve either a config file (--web-conf/--webdir-conf)
    # or a single repository object.
    o = opts.get('web_conf') or opts.get('webdir_conf')
    if not o:
        if not repo:
            raise error.RepoError(_("there is no Mercurial repository"
                                    " here (.hg not found)"))
        o = repo

    app = hgweb.hgweb(o, baseui=baseui)
    service = httpservice(ui, app, opts)
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
5746 5746
class httpservice(object):
    """Service wrapper that runs an hgweb WSGI app in an HTTP server.

    Instances are handed to cmdutil.service(), which calls init() for
    setup and run() for the blocking serve loop.
    """
    def __init__(self, ui, app, opts):
        # ui: output/config object; app: hgweb WSGI application;
        # opts: the parsed 'serve' command options.
        self.ui = ui
        self.app = app
        self.opts = opts

    def init(self):
        """Create the HTTP server and announce the listening address."""
        util.setsignalhandler()
        self.httpd = hgweb_server.create_server(self.ui, self.app)

        # When an explicit port was given, only announce the address in
        # verbose mode.
        if self.opts['port'] and not self.ui.verbose:
            return

        # Normalize the URL path prefix to 'prefix/' (or empty).
        if self.httpd.prefix:
            prefix = self.httpd.prefix.strip('/') + '/'
        else:
            prefix = ''

        # Omit the default HTTP port from the printed URL.
        port = ':%d' % self.httpd.port
        if port == ':80':
            port = ''

        bindaddr = self.httpd.addr
        if bindaddr == '0.0.0.0':
            # Bound to all interfaces; display as a wildcard.
            bindaddr = '*'
        elif ':' in bindaddr: # IPv6
            bindaddr = '[%s]' % bindaddr

        # Bracket IPv6 literals in the fully-qualified address too.
        fqaddr = self.httpd.fqaddr
        if ':' in fqaddr:
            fqaddr = '[%s]' % fqaddr
        # With a chosen port the message is informational (status);
        # with an auto-selected port (0) it must always be written so the
        # caller can learn which port was picked.
        if self.opts['port']:
            write = self.ui.status
        else:
            write = self.ui.write
        write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
              (fqaddr, port, prefix, bindaddr, self.httpd.port))
        self.ui.flush() # avoid buffering of status message

    def run(self):
        """Block, serving HTTP requests until interrupted."""
        self.httpd.serve_forever()
5788 5788
5789 5789
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       status may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # Determine the two nodes to compare: --change means "revision vs its
    # first parent"; otherwise --rev (zero, one, or two revisions) picks the
    # pair, defaulting to working directory vs its parent.
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    # Paths are printed relative to the cwd only when patterns were given.
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''

    # --print0 terminates each filename with NUL instead of newline.
    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    # Categories explicitly requested via -m/-a/-r/-d/-u/-i/-c.
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        # -A adds everything; in quiet mode 'unknown'/'ignored' are dropped.
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # Default selection: -mard in quiet mode, -mardu otherwise.
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(repo[node2], pats, opts)
    stat = repo.status(node1, node2, m,
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    # Pair each state name with its one-character status code and file list.
    changestates = zip(states, 'MAR!?IC', stat)

    # Copy sources are computed only when requested (or configured) and the
    # status prefix is shown, since they are printed as continuation lines.
    if (opts.get('all') or opts.get('copies')
        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2], m)

    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    # Indented line showing where a copied file came from.
                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()
5925 5925
5926 5926 @command('^summary|sum',
5927 5927 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5928 5928 def summary(ui, repo, **opts):
5929 5929 """summarize working directory state
5930 5930
5931 5931 This generates a brief summary of the working directory state,
5932 5932 including parents, branch, commit status, phase and available updates.
5933 5933
5934 5934 With the --remote option, this will check the default paths for
5935 5935 incoming and outgoing changes. This can be time-consuming.
5936 5936
5937 5937 Returns 0 on success.
5938 5938 """
5939 5939
5940 5940 ctx = repo[None]
5941 5941 parents = ctx.parents()
5942 5942 pnode = parents[0].node()
5943 5943 marks = []
5944 5944
5945 5945 for p in parents:
5946 5946 # label with log.changeset (instead of log.parent) since this
5947 5947 # shows a working directory parent *changeset*:
5948 5948 # i18n: column positioning for "hg summary"
5949 5949 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5950 5950 label='log.changeset changeset.%s' % p.phasestr())
5951 5951 ui.write(' '.join(p.tags()), label='log.tag')
5952 5952 if p.bookmarks():
5953 5953 marks.extend(p.bookmarks())
5954 5954 if p.rev() == -1:
5955 5955 if not len(repo):
5956 5956 ui.write(_(' (empty repository)'))
5957 5957 else:
5958 5958 ui.write(_(' (no revision checked out)'))
5959 5959 ui.write('\n')
5960 5960 if p.description():
5961 5961 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5962 5962 label='log.summary')
5963 5963
5964 5964 branch = ctx.branch()
5965 5965 bheads = repo.branchheads(branch)
5966 5966 # i18n: column positioning for "hg summary"
5967 5967 m = _('branch: %s\n') % branch
5968 5968 if branch != 'default':
5969 5969 ui.write(m, label='log.branch')
5970 5970 else:
5971 5971 ui.status(m, label='log.branch')
5972 5972
5973 5973 if marks:
5974 5974 active = repo._activebookmark
5975 5975 # i18n: column positioning for "hg summary"
5976 5976 ui.write(_('bookmarks:'), label='log.bookmark')
5977 5977 if active is not None:
5978 5978 if active in marks:
5979 5979 ui.write(' *' + active, label=activebookmarklabel)
5980 5980 marks.remove(active)
5981 5981 else:
5982 5982 ui.write(' [%s]' % active, label=activebookmarklabel)
5983 5983 for m in marks:
5984 5984 ui.write(' ' + m, label='log.bookmark')
5985 5985 ui.write('\n', label='log.bookmark')
5986 5986
5987 5987 status = repo.status(unknown=True)
5988 5988
5989 5989 c = repo.dirstate.copies()
5990 5990 copied, renamed = [], []
5991 5991 for d, s in c.iteritems():
5992 5992 if s in status.removed:
5993 5993 status.removed.remove(s)
5994 5994 renamed.append(d)
5995 5995 else:
5996 5996 copied.append(d)
5997 5997 if d in status.added:
5998 5998 status.added.remove(d)
5999 5999
6000 6000 ms = mergemod.mergestate(repo)
6001 6001 unresolved = [f for f in ms if ms[f] == 'u']
6002 6002
6003 6003 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6004 6004
6005 6005 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6006 6006 (ui.label(_('%d added'), 'status.added'), status.added),
6007 6007 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6008 6008 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6009 6009 (ui.label(_('%d copied'), 'status.copied'), copied),
6010 6010 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6011 6011 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6012 6012 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6013 6013 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6014 6014 t = []
6015 6015 for l, s in labels:
6016 6016 if s:
6017 6017 t.append(l % len(s))
6018 6018
6019 6019 t = ', '.join(t)
6020 6020 cleanworkdir = False
6021 6021
6022 6022 if repo.vfs.exists('updatestate'):
6023 6023 t += _(' (interrupted update)')
6024 6024 elif len(parents) > 1:
6025 6025 t += _(' (merge)')
6026 6026 elif branch != parents[0].branch():
6027 6027 t += _(' (new branch)')
6028 6028 elif (parents[0].closesbranch() and
6029 6029 pnode in repo.branchheads(branch, closed=True)):
6030 6030 t += _(' (head closed)')
6031 6031 elif not (status.modified or status.added or status.removed or renamed or
6032 6032 copied or subs):
6033 6033 t += _(' (clean)')
6034 6034 cleanworkdir = True
6035 6035 elif pnode not in bheads:
6036 6036 t += _(' (new branch head)')
6037 6037
6038 6038 if parents:
6039 6039 pendingphase = max(p.phase() for p in parents)
6040 6040 else:
6041 6041 pendingphase = phases.public
6042 6042
6043 6043 if pendingphase > phases.newcommitphase(ui):
6044 6044 t += ' (%s)' % phases.phasenames[pendingphase]
6045 6045
6046 6046 if cleanworkdir:
6047 6047 # i18n: column positioning for "hg summary"
6048 6048 ui.status(_('commit: %s\n') % t.strip())
6049 6049 else:
6050 6050 # i18n: column positioning for "hg summary"
6051 6051 ui.write(_('commit: %s\n') % t.strip())
6052 6052
6053 6053 # all ancestors of branch heads - all ancestors of parent = new csets
6054 6054 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6055 6055 bheads))
6056 6056
6057 6057 if new == 0:
6058 6058 # i18n: column positioning for "hg summary"
6059 6059 ui.status(_('update: (current)\n'))
6060 6060 elif pnode not in bheads:
6061 6061 # i18n: column positioning for "hg summary"
6062 6062 ui.write(_('update: %d new changesets (update)\n') % new)
6063 6063 else:
6064 6064 # i18n: column positioning for "hg summary"
6065 6065 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6066 6066 (new, len(bheads)))
6067 6067
6068 6068 t = []
6069 6069 draft = len(repo.revs('draft()'))
6070 6070 if draft:
6071 6071 t.append(_('%d draft') % draft)
6072 6072 secret = len(repo.revs('secret()'))
6073 6073 if secret:
6074 6074 t.append(_('%d secret') % secret)
6075 6075
6076 6076 if draft or secret:
6077 6077 ui.status(_('phases: %s\n') % ', '.join(t))
6078 6078
6079 6079 cmdutil.summaryhooks(ui, repo)
6080 6080
6081 6081 if opts.get('remote'):
6082 6082 needsincoming, needsoutgoing = True, True
6083 6083 else:
6084 6084 needsincoming, needsoutgoing = False, False
6085 6085 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6086 6086 if i:
6087 6087 needsincoming = True
6088 6088 if o:
6089 6089 needsoutgoing = True
6090 6090 if not needsincoming and not needsoutgoing:
6091 6091 return
6092 6092
6093 6093 def getincoming():
6094 6094 source, branches = hg.parseurl(ui.expandpath('default'))
6095 6095 sbranch = branches[0]
6096 6096 try:
6097 6097 other = hg.peer(repo, {}, source)
6098 6098 except error.RepoError:
6099 6099 if opts.get('remote'):
6100 6100 raise
6101 6101 return source, sbranch, None, None, None
6102 6102 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6103 6103 if revs:
6104 6104 revs = [other.lookup(rev) for rev in revs]
6105 6105 ui.debug('comparing with %s\n' % util.hidepassword(source))
6106 6106 repo.ui.pushbuffer()
6107 6107 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6108 6108 repo.ui.popbuffer()
6109 6109 return source, sbranch, other, commoninc, commoninc[1]
6110 6110
6111 6111 if needsincoming:
6112 6112 source, sbranch, sother, commoninc, incoming = getincoming()
6113 6113 else:
6114 6114 source = sbranch = sother = commoninc = incoming = None
6115 6115
6116 6116 def getoutgoing():
6117 6117 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6118 6118 dbranch = branches[0]
6119 6119 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6120 6120 if source != dest:
6121 6121 try:
6122 6122 dother = hg.peer(repo, {}, dest)
6123 6123 except error.RepoError:
6124 6124 if opts.get('remote'):
6125 6125 raise
6126 6126 return dest, dbranch, None, None
6127 6127 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6128 6128 elif sother is None:
6129 6129 # there is no explicit destination peer, but source one is invalid
6130 6130 return dest, dbranch, None, None
6131 6131 else:
6132 6132 dother = sother
6133 6133 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6134 6134 common = None
6135 6135 else:
6136 6136 common = commoninc
6137 6137 if revs:
6138 6138 revs = [repo.lookup(rev) for rev in revs]
6139 6139 repo.ui.pushbuffer()
6140 6140 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6141 6141 commoninc=common)
6142 6142 repo.ui.popbuffer()
6143 6143 return dest, dbranch, dother, outgoing
6144 6144
6145 6145 if needsoutgoing:
6146 6146 dest, dbranch, dother, outgoing = getoutgoing()
6147 6147 else:
6148 6148 dest = dbranch = dother = outgoing = None
6149 6149
6150 6150 if opts.get('remote'):
6151 6151 t = []
6152 6152 if incoming:
6153 6153 t.append(_('1 or more incoming'))
6154 6154 o = outgoing.missing
6155 6155 if o:
6156 6156 t.append(_('%d outgoing') % len(o))
6157 6157 other = dother or sother
6158 6158 if 'bookmarks' in other.listkeys('namespaces'):
6159 6159 counts = bookmarks.summary(repo, other)
6160 6160 if counts[0] > 0:
6161 6161 t.append(_('%d incoming bookmarks') % counts[0])
6162 6162 if counts[1] > 0:
6163 6163 t.append(_('%d outgoing bookmarks') % counts[1])
6164 6164
6165 6165 if t:
6166 6166 # i18n: column positioning for "hg summary"
6167 6167 ui.write(_('remote: %s\n') % (', '.join(t)))
6168 6168 else:
6169 6169 # i18n: column positioning for "hg summary"
6170 6170 ui.status(_('remote: (synced)\n'))
6171 6171
6172 6172 cmdutil.summaryremotehooks(ui, repo, opts,
6173 6173 ((source, sbranch, sother, commoninc),
6174 6174 (dest, dbranch, dother, outgoing)))
6175 6175
@command('tag',
    [('f', 'force', None, _('force tag')),
    ('l', 'local', None, _('make the tag local')),
    ('r', 'rev', '', _('revision to tag'), _('REV')),
    ('', 'remove', None, _('remove a tag')),
    # -l/--local is already there, commitopts cannot be used
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
    ] + commitopts2,
    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    wlock = lock = None
    try:
        # both locks: tagging writes .hgtags (wdir) and commits (store)
        wlock = repo.wlock()
        lock = repo.lock()
        rev_ = "."
        # normalize and validate all requested tag names up front
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise util.Abort(_('tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, 'tag')
            if not n:
                raise util.Abort(_('tag names cannot consist entirely of '
                                   'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise util.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            rev_ = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # every removed tag must already exist with the matching
            # scope (local vs global)
            if opts.get('local'):
                expectedtype = 'local'
            else:
                expectedtype = 'global'

            for n in names:
                if not repo.tagtype(n):
                    raise util.Abort(_("tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == 'global':
                        raise util.Abort(_("tag '%s' is not a global tag") % n)
                    else:
                        raise util.Abort(_("tag '%s' is not a local tag") % n)
            # removal is recorded as the tag pointing at the null node
            rev_ = nullid
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for n in names:
                if n in repo.tags():
                    raise util.Abort(_("tag '%s' already exists "
                                       "(use -f to force)") % n)
        if not opts.get('local'):
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise util.Abort(_('not at a branch head (use -f to force)'))
        r = scmutil.revsingle(repo, rev_).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(r)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        if opts.get('remove'):
            editform = 'tag.remove'
        else:
            editform = 'tag.add'
        editor = cmdutil.getcommiteditor(editform=editform, **opts)

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, rev_).rev() == nullrev):
            raise util.Abort(_("cannot tag null revision"))

        repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
                 editor=editor)
    finally:
        release(lock, wlock)
6292 6292
@command('tags', formatteropts, '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.

    Returns 0 on success.
    """

    fm = ui.formatter('tags', opts)
    hexfunc = fm.hexfunc
    tagtype = ""

    # newest tag first
    for name, node in reversed(repo.tagslist()):
        islocal = repo.tagtype(name) == 'local'
        label = 'tags.local' if islocal else 'tags.normal'
        tagtype = 'local' if islocal else ''

        fm.startitem()
        fm.write('tag', '%s', name, label=label)
        # pad the tag name into a 30-column field before the rev:node pair
        fmt = " " * (30 - encoding.colwidth(name)) + ' %5d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt,
                     repo.changelog.rev(node), hexfunc(node), label=label)
        # third "local" column only in verbose mode and only for local tags
        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
                     tagtype, label=label)
        fm.plain('\n')
    fm.end()
6324 6324
@command('tip',
    [('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ] + templateopts,
    _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    # render exactly one changeset -- the 'tip' tag -- then flush
    shower = cmdutil.show_changeset(ui, repo, opts)
    tipctx = repo['tip']
    shower.show(tipctx)
    shower.close()
6349 6349
@command('unbundle',
    [('u', 'update', None,
     _('update to new branch head if changesets were unbundled'))],
    _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    lock = repo.lock()
    try:
        for fname in fnames:
            f = hg.openpath(ui, fname)
            gen = exchange.readbundle(ui, f, fname)
            if isinstance(gen, bundle2.unbundle20):
                # bundle2 payloads are applied inside a transaction that
                # is released (rolled back) unless explicitly closed
                tr = repo.transaction('unbundle')
                try:
                    op = bundle2.processbundle(repo, gen, lambda: tr)
                    tr.close()
                finally:
                    if tr:
                        tr.release()
                # combine per-changegroup-part results into one head count
                changes = [r.get('result', 0)
                           for r in op.records['changegroup']]
                modheads = changegroup.combineresults(changes)
            else:
                # legacy (bundle1) changegroup application
                modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
                                                      'bundle:' + fname)
    finally:
        lock.release()

    # note: only the last bundle's modheads drives the optional update
    return postincoming(ui, repo, modheads, opts.get('update'), None)
6387 6387
@command('^update|up|checkout|co',
    [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
    ('c', 'check', None,
     _('update across branches if no uncommitted changes')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revision'), _('REV'))
    ] + mergetoolopts,
    _('[-c] [-C] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
           tool=None):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the active bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent, the update is aborted. With the -c/--check
    option, the working directory is checked for uncommitted changes; if
    none are found, the working directory is updated to the specified
    changeset.

    .. container:: verbose

      The following rules apply when the working directory contains
      uncommitted changes:

      1. If neither -c/--check nor -C/--clean is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      3. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`update --clean .`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if rev is None or rev == '':
        rev = node

    cmdutil.clearunfinished(repo)

    # with no argument, we also move the active bookmark, if any
    rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)

    # if we defined a bookmark, we have to remember the original bookmark name
    brev = rev
    rev = scmutil.revsingle(repo, rev, rev).rev()

    if check and clean:
        raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))

    if date:
        if rev is not None:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if check:
        cmdutil.bailifchanged(repo, merge=False)
        if rev is None:
            # no explicit target: tip of the working directory's branch
            rev = repo[repo[None].branch()].rev()

    repo.ui.setconfig('ui', 'forcemerge', tool, 'update')

    if clean:
        ret = hg.clean(repo, rev)
    else:
        ret = hg.update(repo, rev)

    # bookkeeping for bookmarks: move, activate, or deactivate depending
    # on how the target revision was specified
    if not ret and movemarkfrom:
        if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
            ui.status(_("updating bookmark %s\n") % repo._activebookmark)
    elif brev in repo._bookmarks:
        bookmarks.activate(repo, brev)
        ui.status(_("(activating bookmark %s)\n") % brev)
    elif brev:
        if repo._activebookmark:
            ui.status(_("(leaving bookmark %s)\n") %
                      repo._activebookmark)
        bookmarks.deactivate(repo)

    return ret
6495 6495
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    # thin wrapper: all checking logic lives in hg.verify()
    return hg.verify(repo)
6514 6514
@command('version', [], norepo=True)
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "(see http://mercurial.selenic.com for more information)\n"
        "\nCopyright (C) 2005-2015 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

    ui.note(_("\nEnabled extensions:\n\n"))
    if ui.verbose:
        # format names and versions into columns
        exts = [(name, extensions.moduleversion(module))
                for name, module in extensions.extensions()]
        if exts:
            maxnamelen = max(len(name) for name, ver in exts)
            for name, ver in exts:
                ui.write(" %-*s %s\n" % (maxnamelen, name, ver))
@@ -1,1006 +1,1006
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid
9 9 from i18n import _
10 10 import scmutil, util, osutil, parsers, encoding, pathutil
11 11 import os, stat, errno
12 12 import match as matchmod
13 13
14 14 propertycache = util.propertycache
15 15 filecache = scmutil.filecache
16 16 _rangemask = 0x7fffffff
17 17
18 18 dirstatetuple = parsers.dirstatetuple
19 19
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # resolve fname against the dirstate's opener, i.e. the .hg directory
        return obj._opener.join(fname)
24 24
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # resolve fname against the repository root (see dirstate._join)
        return obj._join(fname)
29 29
30 30 class dirstate(object):
31 31
    def __init__(self, opener, ui, root, validate):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        # callable applied to parent nodes before they are returned
        # (see parents()/p1()/p2())
        self._validate = validate
        self._root = root
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False     # in-memory state differs from on-disk file
        self._dirtypl = False   # parents were changed in memory (see _read)
        self._lastnormaltime = 0  # newest mtime marked clean (see normal())
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0  # nesting depth of beginparentchange()
        self._filename = 'dirstate'
52 52
    def beginparentchange(self):
        '''Marks the beginning of a set of changes that involve changing
        the dirstate parents. If there is an exception during this time,
        the dirstate will not be written when the wlock is released. This
        prevents writing an incoherent dirstate where the parent doesn't
        match the contents.
        '''
        # calls may nest; see endparentchange/pendingparentchange
        self._parentwriters += 1
61 61
    def endparentchange(self):
        '''Marks the end of a set of changes that involve changing the
        dirstate parents. Once all parent changes have been marked done,
        the wlock will be free to write the dirstate on release.
        '''
        # guard against unbalanced end calls; never go negative
        if self._parentwriters > 0:
            self._parentwriters -= 1
69 69
    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        # nonzero counter means at least one beginparentchange() is open
        return self._parentwriters > 0
75 75
    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        # _read() assigns self._map as a side effect, shadowing this property
        self._read()
        return self._map
82 82
    @propertycache
    def _copymap(self):
        # map of dest -> copy source, populated by _read() as a side effect
        self._read()
        return self._copymap
87 87
88 88 @propertycache
89 89 def _filefoldmap(self):
90 90 try:
91 91 makefilefoldmap = parsers.make_file_foldmap
92 92 except AttributeError:
93 93 pass
94 94 else:
95 95 return makefilefoldmap(self._map, util.normcasespec,
96 96 util.normcasefallback)
97 97
98 98 f = {}
99 99 normcase = util.normcase
100 100 for name, s in self._map.iteritems():
101 101 if s[0] != 'r':
102 102 f[normcase(name)] = name
103 103 f['.'] = '.' # prevents useless util.fspath() invocation
104 104 return f
105 105
106 106 @propertycache
107 107 def _dirfoldmap(self):
108 108 f = {}
109 109 normcase = util.normcase
110 110 for name in self._dirs:
111 111 f[normcase(name)] = name
112 112 return f
113 113
    @repocache('branch')
    def _branch(self):
        # current branch name is stored in .hg/branch; a missing file
        # means the default branch
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"
122 122
    @propertycache
    def _pl(self):
        # the two 20-byte parent nodes live at the start of the dirstate file
        try:
            fp = self._opener(self._filename)
            st = fp.read(40)
            fp.close()
            l = len(st)
            if l == 40:
                return st[:20], st[20:40]
            elif l > 0 and l < 40:
                # a partial header means the file was truncated mid-write
                raise util.Abort(_('working directory state appears damaged!'))
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
        # no dirstate file (or an empty one): both parents are null
        return [nullid, nullid]
138 138
    @propertycache
    def _dirs(self):
        # directory multiset over tracked files; 'r' presumably tells
        # util.dirs to skip removed entries -- confirm against util.dirs
        return util.dirs(self._map, 'r')
142 142
    def dirs(self):
        # public accessor for the cached directory structure
        return self._dirs
145 145
    @rootcache('.hgignore')
    def _ignore(self):
        '''Return a matcher built from .hgignore plus ui.ignore* config files.'''
        files = []
        if os.path.exists(self._join('.hgignore')):
            files.append(self._join('.hgignore'))
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))

        if not files:
            # nothing to ignore: a matcher that never matches
            return util.never

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
162 162
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' although os.sep differs
        return self._ui.configbool('ui', 'slash') and os.sep != '/'
166 166
    @propertycache
    def _checklink(self):
        # whether the filesystem under the repo root supports symlinks
        return util.checklink(self._root)
170 170
    @propertycache
    def _checkexec(self):
        # whether the filesystem under the repo root honors the exec bit
        return util.checkexec(self._root)
174 174
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the .hg directory)
        return not util.checkcase(self._join('.hg'))
178 178
    def _join(self, f):
        '''Return the absolute path of tracked file f.'''
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
183 183
    def flagfunc(self, buildfallback):
        '''Return a function mapping a filename to its 'l'/'x'/'' flags.

        buildfallback is only invoked when the filesystem cannot answer
        the symlink and/or exec question itself.
        '''
        if self._checklink and self._checkexec:
            # best case: one lstat answers both questions
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks from the filesystem, exec bit from the fallback
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            # exec bit from the filesystem, symlinks from the fallback
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            # neither supported: delegate entirely to the fallback
            return fallback
217 217
    @propertycache
    def _cwd(self):
        # NOTE(review): cached at first use; goes stale if the process
        # chdirs afterwards -- confirm callers rely on that
        return os.getcwd()
221 221
222 222 def getcwd(self):
223 223 cwd = self._cwd
224 224 if cwd == self._root:
225 225 return ''
226 226 # self._root ends with a path separator if self._root is '/' or 'C:\'
227 227 rootsep = self._root
228 228 if not util.endswithsep(rootsep):
229 229 rootsep += os.sep
230 230 if cwd.startswith(rootsep):
231 231 return cwd[len(rootsep):]
232 232 else:
233 233 # we're outside the repo. return an absolute path.
234 234 return cwd
235 235
    def pathto(self, f, cwd=None):
        '''Return path of tracked file f relative to cwd (default: getcwd()).'''
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            # honor ui.slash: always show forward slashes
            return util.pconvert(path)
        return path
243 243
    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        # untracked files fall back to the one-entry tuple ("?",)
        return self._map.get(key, ("?",))[0]
255 255
    def __contains__(self, key):
        # membership means "has a dirstate entry", including removed ('r') ones
        return key in self._map
258 258
259 259 def __iter__(self):
260 260 for x in sorted(self._map):
261 261 yield x
262 262
    def iteritems(self):
        # (filename, dirstatetuple) pairs, unsorted
        return self._map.iteritems()
265 265
    def parents(self):
        # both working directory parents, passed through the validator
        return [self._validate(p) for p in self._pl]
268 268
    def p1(self):
        # first working directory parent, validated
        return self._validate(self._pl[0])
271 271
    def p2(self):
        # second working directory parent (nullid unless merging), validated
        return self._validate(self._pl[1])
274 274
    def branch(self):
        # branch name converted to the local encoding for display
        return encoding.tolocal(self._branch)
277 277
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            # callers must wrap parent changes in begin/endparentchange
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = self._dirtypl = True
        oldp2 = self._pl[1]
        self._pl = p1, p2
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # leaving a merge state: downgrade merge-specific entries
            for f, s in self._map.iteritems():
                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    if f in self._copymap:
                        copies[f] = self._copymap[f]
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    if f in self._copymap:
                        copies[f] = self._copymap[f]
                    self.add(f)
        return copies
308 308
    def setbranch(self, branch):
        '''Set the working directory branch and persist it to .hg/branch.'''
        self._branch = encoding.fromlocal(branch)
        # atomictemp: readers never see a half-written branch file
        f = self._opener('branch', 'w', atomictemp=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
324 324
    def _read(self):
        '''Parse the dirstate file into _map/_copymap (and _pl, unless the
        parents were already changed in memory).'''
        self._map = {}
        self._copymap = {}
        try:
            fp = self._opener.open(self._filename)
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
            # missing dirstate file: leave the maps empty
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self._copymap, st)
        if not self._dirtypl:
            # only take parents from disk when they were not set in memory
            self._pl = p
370 370
371 371 def invalidate(self):
372 372 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
373 373 "_pl", "_dirs", "_ignore"):
374 374 if a in self.__dict__:
375 375 delattr(self, a)
376 376 self._lastnormaltime = 0
377 377 self._dirty = False
378 378 self._parentwriters = 0
379 379
380 380 def copy(self, source, dest):
381 381 """Mark dest as a copy of source. Unmark dest if source is None."""
382 382 if source == dest:
383 383 return
384 384 self._dirty = True
385 385 if source is not None:
386 386 self._copymap[dest] = source
387 387 elif dest in self._copymap:
388 388 del self._copymap[dest]
389 389
390 390 def copied(self, file):
391 391 return self._copymap.get(file, None)
392 392
393 393 def copies(self):
394 394 return self._copymap
395 395
396 396 def _droppath(self, f):
397 397 if self[f] not in "?r" and "_dirs" in self.__dict__:
398 398 self._dirs.delpath(f)
399 399
400 400 def _addpath(self, f, state, mode, size, mtime):
401 401 oldstate = self[f]
402 402 if state == 'a' or oldstate == 'r':
403 403 scmutil.checkfilename(f)
404 404 if f in self._dirs:
405 405 raise util.Abort(_('directory %r already in dirstate') % f)
406 406 # shadows
407 407 for d in util.finddirs(f):
408 408 if d in self._dirs:
409 409 break
410 410 if d in self._map and self[d] != 'r':
411 411 raise util.Abort(
412 412 _('file %r in dirstate clashes with %r') % (d, f))
413 413 if oldstate in "?r" and "_dirs" in self.__dict__:
414 414 self._dirs.addpath(f)
415 415 self._dirty = True
416 416 self._map[f] = dirstatetuple(state, mode, size, mtime)
417 417
418 418 def normal(self, f):
419 419 '''Mark a file normal and clean.'''
420 420 s = os.lstat(self._join(f))
421 421 mtime = int(s.st_mtime)
422 422 self._addpath(f, 'n', s.st_mode,
423 423 s.st_size & _rangemask, mtime & _rangemask)
424 424 if f in self._copymap:
425 425 del self._copymap[f]
426 426 if mtime > self._lastnormaltime:
427 427 # Remember the most recent modification timeslot for status(),
428 428 # to make sure we won't miss future size-preserving file content
429 429 # modifications that happen within the same timeslot.
430 430 self._lastnormaltime = mtime
431 431
432 432 def normallookup(self, f):
433 433 '''Mark a file normal, but possibly dirty.'''
434 434 if self._pl[1] != nullid and f in self._map:
435 435 # if there is a merge going on and the file was either
436 436 # in state 'm' (-1) or coming from other parent (-2) before
437 437 # being removed, restore that state.
438 438 entry = self._map[f]
439 439 if entry[0] == 'r' and entry[2] in (-1, -2):
440 440 source = self._copymap.get(f)
441 441 if entry[2] == -1:
442 442 self.merge(f)
443 443 elif entry[2] == -2:
444 444 self.otherparent(f)
445 445 if source:
446 446 self.copy(source, f)
447 447 return
448 448 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
449 449 return
450 450 self._addpath(f, 'n', 0, -1, -1)
451 451 if f in self._copymap:
452 452 del self._copymap[f]
453 453
454 454 def otherparent(self, f):
455 455 '''Mark as coming from the other parent, always dirty.'''
456 456 if self._pl[1] == nullid:
457 457 raise util.Abort(_("setting %r to other parent "
458 458 "only allowed in merges") % f)
459 459 if f in self and self[f] == 'n':
460 460 # merge-like
461 461 self._addpath(f, 'm', 0, -2, -1)
462 462 else:
463 463 # add-like
464 464 self._addpath(f, 'n', 0, -2, -1)
465 465
466 466 if f in self._copymap:
467 467 del self._copymap[f]
468 468
469 469 def add(self, f):
470 470 '''Mark a file added.'''
471 471 self._addpath(f, 'a', 0, -1, -1)
472 472 if f in self._copymap:
473 473 del self._copymap[f]
474 474
475 475 def remove(self, f):
476 476 '''Mark a file removed.'''
477 477 self._dirty = True
478 478 self._droppath(f)
479 479 size = 0
480 480 if self._pl[1] != nullid and f in self._map:
481 481 # backup the previous state
482 482 entry = self._map[f]
483 483 if entry[0] == 'm': # merge
484 484 size = -1
485 485 elif entry[0] == 'n' and entry[2] == -2: # other parent
486 486 size = -2
487 487 self._map[f] = dirstatetuple('r', 0, size, 0)
488 488 if size == 0 and f in self._copymap:
489 489 del self._copymap[f]
490 490
491 491 def merge(self, f):
492 492 '''Mark a file merged.'''
493 493 if self._pl[1] == nullid:
494 494 return self.normallookup(f)
495 495 return self.otherparent(f)
496 496
497 497 def drop(self, f):
498 498 '''Drop a file from the dirstate'''
499 499 if f in self._map:
500 500 self._dirty = True
501 501 self._droppath(f)
502 502 del self._map[f]
503 503
504 504 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
505 505 if exists is None:
506 506 exists = os.path.lexists(os.path.join(self._root, path))
507 507 if not exists:
508 508 # Maybe a path component exists
509 509 if not ignoremissing and '/' in path:
510 510 d, f = path.rsplit('/', 1)
511 511 d = self._normalize(d, False, ignoremissing, None)
512 512 folded = d + "/" + f
513 513 else:
514 514 # No path components, preserve original case
515 515 folded = path
516 516 else:
517 517 # recursively normalize leading directory components
518 518 # against dirstate
519 519 if '/' in normed:
520 520 d, f = normed.rsplit('/', 1)
521 521 d = self._normalize(d, False, ignoremissing, True)
522 522 r = self._root + "/" + d
523 523 folded = d + "/" + util.fspath(f, r)
524 524 else:
525 525 folded = util.fspath(normed, self._root)
526 526 storemap[normed] = folded
527 527
528 528 return folded
529 529
530 530 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
531 531 normed = util.normcase(path)
532 532 folded = self._filefoldmap.get(normed, None)
533 533 if folded is None:
534 534 if isknown:
535 535 folded = path
536 536 else:
537 537 folded = self._discoverpath(path, normed, ignoremissing, exists,
538 538 self._filefoldmap)
539 539 return folded
540 540
541 541 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
542 542 normed = util.normcase(path)
543 543 folded = self._filefoldmap.get(normed, None)
544 544 if folded is None:
545 545 folded = self._dirfoldmap.get(normed, None)
546 546 if folded is None:
547 547 if isknown:
548 548 folded = path
549 549 else:
550 550 # store discovered result in dirfoldmap so that future
551 551 # normalizefile calls don't start matching directories
552 552 folded = self._discoverpath(path, normed, ignoremissing, exists,
553 553 self._dirfoldmap)
554 554 return folded
555 555
556 556 def normalize(self, path, isknown=False, ignoremissing=False):
557 557 '''
558 558 normalize the case of a pathname when on a casefolding filesystem
559 559
560 560 isknown specifies whether the filename came from walking the
561 561 disk, to avoid extra filesystem access.
562 562
563 563 If ignoremissing is True, missing path are returned
564 564 unchanged. Otherwise, we try harder to normalize possibly
565 565 existing path components.
566 566
567 567 The normalized case is determined based on the following precedence:
568 568
569 569 - version of name already stored in the dirstate
570 570 - version of name stored on disk
571 571 - version provided via command arguments
572 572 '''
573 573
574 574 if self._checkcase:
575 575 return self._normalize(path, isknown, ignoremissing)
576 576 return path
577 577
578 578 def clear(self):
579 579 self._map = {}
580 580 if "_dirs" in self.__dict__:
581 581 delattr(self, "_dirs")
582 582 self._copymap = {}
583 583 self._pl = [nullid, nullid]
584 584 self._lastnormaltime = 0
585 585 self._dirty = True
586 586
587 587 def rebuild(self, parent, allfiles, changedfiles=None):
588 588 if changedfiles is None:
589 589 changedfiles = allfiles
590 590 oldmap = self._map
591 591 self.clear()
592 592 for f in allfiles:
593 593 if f not in changedfiles:
594 594 self._map[f] = oldmap[f]
595 595 else:
596 596 if 'x' in allfiles.flags(f):
597 self._map[f] = dirstatetuple('n', 0777, -1, 0)
597 self._map[f] = dirstatetuple('n', 0o777, -1, 0)
598 598 else:
599 self._map[f] = dirstatetuple('n', 0666, -1, 0)
599 self._map[f] = dirstatetuple('n', 0o666, -1, 0)
600 600 self._pl = (parent, nullid)
601 601 self._dirty = True
602 602
603 603 def write(self):
604 604 if not self._dirty:
605 605 return
606 606
607 607 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
608 608 # timestamp of each entries in dirstate, because of 'now > mtime'
609 609 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
610 610 if delaywrite > 0:
611 611 import time # to avoid useless import
612 612 time.sleep(delaywrite)
613 613
614 614 st = self._opener(self._filename, "w", atomictemp=True)
615 615 # use the modification time of the newly created temporary file as the
616 616 # filesystem's notion of 'now'
617 617 now = util.fstat(st).st_mtime
618 618 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
619 619 st.close()
620 620 self._lastnormaltime = 0
621 621 self._dirty = self._dirtypl = False
622 622
623 623 def _dirignore(self, f):
624 624 if f == '.':
625 625 return False
626 626 if self._ignore(f):
627 627 return True
628 628 for p in util.finddirs(f):
629 629 if self._ignore(p):
630 630 return True
631 631 return False
632 632
633 633 def _walkexplicit(self, match, subrepos):
634 634 '''Get stat data about the files explicitly specified by match.
635 635
636 636 Return a triple (results, dirsfound, dirsnotfound).
637 637 - results is a mapping from filename to stat result. It also contains
638 638 listings mapping subrepos and .hg to None.
639 639 - dirsfound is a list of files found to be directories.
640 640 - dirsnotfound is a list of files that the dirstate thinks are
641 641 directories and that were not found.'''
642 642
643 643 def badtype(mode):
644 644 kind = _('unknown')
645 645 if stat.S_ISCHR(mode):
646 646 kind = _('character device')
647 647 elif stat.S_ISBLK(mode):
648 648 kind = _('block device')
649 649 elif stat.S_ISFIFO(mode):
650 650 kind = _('fifo')
651 651 elif stat.S_ISSOCK(mode):
652 652 kind = _('socket')
653 653 elif stat.S_ISDIR(mode):
654 654 kind = _('directory')
655 655 return _('unsupported file type (type is %s)') % kind
656 656
657 657 matchedir = match.explicitdir
658 658 badfn = match.bad
659 659 dmap = self._map
660 660 lstat = os.lstat
661 661 getkind = stat.S_IFMT
662 662 dirkind = stat.S_IFDIR
663 663 regkind = stat.S_IFREG
664 664 lnkkind = stat.S_IFLNK
665 665 join = self._join
666 666 dirsfound = []
667 667 foundadd = dirsfound.append
668 668 dirsnotfound = []
669 669 notfoundadd = dirsnotfound.append
670 670
671 671 if not match.isexact() and self._checkcase:
672 672 normalize = self._normalize
673 673 else:
674 674 normalize = None
675 675
676 676 files = sorted(match.files())
677 677 subrepos.sort()
678 678 i, j = 0, 0
679 679 while i < len(files) and j < len(subrepos):
680 680 subpath = subrepos[j] + "/"
681 681 if files[i] < subpath:
682 682 i += 1
683 683 continue
684 684 while i < len(files) and files[i].startswith(subpath):
685 685 del files[i]
686 686 j += 1
687 687
688 688 if not files or '.' in files:
689 689 files = ['.']
690 690 results = dict.fromkeys(subrepos)
691 691 results['.hg'] = None
692 692
693 693 alldirs = None
694 694 for ff in files:
695 695 # constructing the foldmap is expensive, so don't do it for the
696 696 # common case where files is ['.']
697 697 if normalize and ff != '.':
698 698 nf = normalize(ff, False, True)
699 699 else:
700 700 nf = ff
701 701 if nf in results:
702 702 continue
703 703
704 704 try:
705 705 st = lstat(join(nf))
706 706 kind = getkind(st.st_mode)
707 707 if kind == dirkind:
708 708 if nf in dmap:
709 709 # file replaced by dir on disk but still in dirstate
710 710 results[nf] = None
711 711 if matchedir:
712 712 matchedir(nf)
713 713 foundadd((nf, ff))
714 714 elif kind == regkind or kind == lnkkind:
715 715 results[nf] = st
716 716 else:
717 717 badfn(ff, badtype(kind))
718 718 if nf in dmap:
719 719 results[nf] = None
720 720 except OSError, inst: # nf not found on disk - it is dirstate only
721 721 if nf in dmap: # does it exactly match a missing file?
722 722 results[nf] = None
723 723 else: # does it match a missing directory?
724 724 if alldirs is None:
725 725 alldirs = util.dirs(dmap)
726 726 if nf in alldirs:
727 727 if matchedir:
728 728 matchedir(nf)
729 729 notfoundadd(nf)
730 730 else:
731 731 badfn(ff, inst.strerror)
732 732
733 733 return results, dirsfound, dirsnotfound
734 734
735 735 def walk(self, match, subrepos, unknown, ignored, full=True):
736 736 '''
737 737 Walk recursively through the directory tree, finding all files
738 738 matched by match.
739 739
740 740 If full is False, maybe skip some known-clean files.
741 741
742 742 Return a dict mapping filename to stat-like object (either
743 743 mercurial.osutil.stat instance or return value of os.stat()).
744 744
745 745 '''
746 746 # full is a flag that extensions that hook into walk can use -- this
747 747 # implementation doesn't use it at all. This satisfies the contract
748 748 # because we only guarantee a "maybe".
749 749
750 750 if ignored:
751 751 ignore = util.never
752 752 dirignore = util.never
753 753 elif unknown:
754 754 ignore = self._ignore
755 755 dirignore = self._dirignore
756 756 else:
757 757 # if not unknown and not ignored, drop dir recursion and step 2
758 758 ignore = util.always
759 759 dirignore = util.always
760 760
761 761 matchfn = match.matchfn
762 762 matchalways = match.always()
763 763 matchtdir = match.traversedir
764 764 dmap = self._map
765 765 listdir = osutil.listdir
766 766 lstat = os.lstat
767 767 dirkind = stat.S_IFDIR
768 768 regkind = stat.S_IFREG
769 769 lnkkind = stat.S_IFLNK
770 770 join = self._join
771 771
772 772 exact = skipstep3 = False
773 773 if match.isexact(): # match.exact
774 774 exact = True
775 775 dirignore = util.always # skip step 2
776 776 elif match.prefix(): # match.match, no patterns
777 777 skipstep3 = True
778 778
779 779 if not exact and self._checkcase:
780 780 normalize = self._normalize
781 781 normalizefile = self._normalizefile
782 782 skipstep3 = False
783 783 else:
784 784 normalize = self._normalize
785 785 normalizefile = None
786 786
787 787 # step 1: find all explicit files
788 788 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
789 789
790 790 skipstep3 = skipstep3 and not (work or dirsnotfound)
791 791 work = [d for d in work if not dirignore(d[0])]
792 792
793 793 # step 2: visit subdirectories
794 794 def traverse(work, alreadynormed):
795 795 wadd = work.append
796 796 while work:
797 797 nd = work.pop()
798 798 skip = None
799 799 if nd == '.':
800 800 nd = ''
801 801 else:
802 802 skip = '.hg'
803 803 try:
804 804 entries = listdir(join(nd), stat=True, skip=skip)
805 805 except OSError, inst:
806 806 if inst.errno in (errno.EACCES, errno.ENOENT):
807 807 match.bad(self.pathto(nd), inst.strerror)
808 808 continue
809 809 raise
810 810 for f, kind, st in entries:
811 811 if normalizefile:
812 812 # even though f might be a directory, we're only
813 813 # interested in comparing it to files currently in the
814 814 # dmap -- therefore normalizefile is enough
815 815 nf = normalizefile(nd and (nd + "/" + f) or f, True,
816 816 True)
817 817 else:
818 818 nf = nd and (nd + "/" + f) or f
819 819 if nf not in results:
820 820 if kind == dirkind:
821 821 if not ignore(nf):
822 822 if matchtdir:
823 823 matchtdir(nf)
824 824 wadd(nf)
825 825 if nf in dmap and (matchalways or matchfn(nf)):
826 826 results[nf] = None
827 827 elif kind == regkind or kind == lnkkind:
828 828 if nf in dmap:
829 829 if matchalways or matchfn(nf):
830 830 results[nf] = st
831 831 elif ((matchalways or matchfn(nf))
832 832 and not ignore(nf)):
833 833 # unknown file -- normalize if necessary
834 834 if not alreadynormed:
835 835 nf = normalize(nf, False, True)
836 836 results[nf] = st
837 837 elif nf in dmap and (matchalways or matchfn(nf)):
838 838 results[nf] = None
839 839
840 840 for nd, d in work:
841 841 # alreadynormed means that processwork doesn't have to do any
842 842 # expensive directory normalization
843 843 alreadynormed = not normalize or nd == d
844 844 traverse([d], alreadynormed)
845 845
846 846 for s in subrepos:
847 847 del results[s]
848 848 del results['.hg']
849 849
850 850 # step 3: visit remaining files from dmap
851 851 if not skipstep3 and not exact:
852 852 # If a dmap file is not in results yet, it was either
853 853 # a) not matching matchfn b) ignored, c) missing, or d) under a
854 854 # symlink directory.
855 855 if not results and matchalways:
856 856 visit = dmap.keys()
857 857 else:
858 858 visit = [f for f in dmap if f not in results and matchfn(f)]
859 859 visit.sort()
860 860
861 861 if unknown:
862 862 # unknown == True means we walked all dirs under the roots
863 863 # that wasn't ignored, and everything that matched was stat'ed
864 864 # and is already in results.
865 865 # The rest must thus be ignored or under a symlink.
866 866 audit_path = pathutil.pathauditor(self._root)
867 867
868 868 for nf in iter(visit):
869 869 # If a stat for the same file was already added with a
870 870 # different case, don't add one for this, since that would
871 871 # make it appear as if the file exists under both names
872 872 # on disk.
873 873 if (normalizefile and
874 874 normalizefile(nf, True, True) in results):
875 875 results[nf] = None
876 876 # Report ignored items in the dmap as long as they are not
877 877 # under a symlink directory.
878 878 elif audit_path.check(nf):
879 879 try:
880 880 results[nf] = lstat(join(nf))
881 881 # file was just ignored, no links, and exists
882 882 except OSError:
883 883 # file doesn't exist
884 884 results[nf] = None
885 885 else:
886 886 # It's either missing or under a symlink directory
887 887 # which we in this case report as missing
888 888 results[nf] = None
889 889 else:
890 890 # We may not have walked the full directory tree above,
891 891 # so stat and check everything we missed.
892 892 nf = iter(visit).next
893 893 for st in util.statfiles([join(i) for i in visit]):
894 894 results[nf()] = st
895 895 return results
896 896
897 897 def status(self, match, subrepos, ignored, clean, unknown):
898 898 '''Determine the status of the working copy relative to the
899 899 dirstate and return a pair of (unsure, status), where status is of type
900 900 scmutil.status and:
901 901
902 902 unsure:
903 903 files that might have been modified since the dirstate was
904 904 written, but need to be read to be sure (size is the same
905 905 but mtime differs)
906 906 status.modified:
907 907 files that have definitely been modified since the dirstate
908 908 was written (different size or mode)
909 909 status.clean:
910 910 files that have definitely not been modified since the
911 911 dirstate was written
912 912 '''
913 913 listignored, listclean, listunknown = ignored, clean, unknown
914 914 lookup, modified, added, unknown, ignored = [], [], [], [], []
915 915 removed, deleted, clean = [], [], []
916 916
917 917 dmap = self._map
918 918 ladd = lookup.append # aka "unsure"
919 919 madd = modified.append
920 920 aadd = added.append
921 921 uadd = unknown.append
922 922 iadd = ignored.append
923 923 radd = removed.append
924 924 dadd = deleted.append
925 925 cadd = clean.append
926 926 mexact = match.exact
927 927 dirignore = self._dirignore
928 928 checkexec = self._checkexec
929 929 copymap = self._copymap
930 930 lastnormaltime = self._lastnormaltime
931 931
932 932 # We need to do full walks when either
933 933 # - we're listing all clean files, or
934 934 # - match.traversedir does something, because match.traversedir should
935 935 # be called for every dir in the working dir
936 936 full = listclean or match.traversedir is not None
937 937 for fn, st in self.walk(match, subrepos, listunknown, listignored,
938 938 full=full).iteritems():
939 939 if fn not in dmap:
940 940 if (listignored or mexact(fn)) and dirignore(fn):
941 941 if listignored:
942 942 iadd(fn)
943 943 else:
944 944 uadd(fn)
945 945 continue
946 946
947 947 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
948 948 # written like that for performance reasons. dmap[fn] is not a
949 949 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
950 950 # opcode has fast paths when the value to be unpacked is a tuple or
951 951 # a list, but falls back to creating a full-fledged iterator in
952 952 # general. That is much slower than simply accessing and storing the
953 953 # tuple members one by one.
954 954 t = dmap[fn]
955 955 state = t[0]
956 956 mode = t[1]
957 957 size = t[2]
958 958 time = t[3]
959 959
960 960 if not st and state in "nma":
961 961 dadd(fn)
962 962 elif state == 'n':
963 963 mtime = int(st.st_mtime)
964 964 if (size >= 0 and
965 965 ((size != st.st_size and size != st.st_size & _rangemask)
966 or ((mode ^ st.st_mode) & 0100 and checkexec))
966 or ((mode ^ st.st_mode) & 0o100 and checkexec))
967 967 or size == -2 # other parent
968 968 or fn in copymap):
969 969 madd(fn)
970 970 elif time != mtime and time != mtime & _rangemask:
971 971 ladd(fn)
972 972 elif mtime == lastnormaltime:
973 973 # fn may have just been marked as normal and it may have
974 974 # changed in the same second without changing its size.
975 975 # This can happen if we quickly do multiple commits.
976 976 # Force lookup, so we don't miss such a racy file change.
977 977 ladd(fn)
978 978 elif listclean:
979 979 cadd(fn)
980 980 elif state == 'm':
981 981 madd(fn)
982 982 elif state == 'a':
983 983 aadd(fn)
984 984 elif state == 'r':
985 985 radd(fn)
986 986
987 987 return (lookup, scmutil.status(modified, added, removed, deleted,
988 988 unknown, ignored, clean))
989 989
990 990 def matches(self, match):
991 991 '''
992 992 return files in the dirstate (in whatever state) filtered by match
993 993 '''
994 994 dmap = self._map
995 995 if match.always():
996 996 return dmap.keys()
997 997 files = match.files()
998 998 if match.isexact():
999 999 # fast path -- filter the other way around, since typically files is
1000 1000 # much smaller than dmap
1001 1001 return [f for f in files if f in dmap]
1002 1002 if match.prefix() and all(fn in dmap for fn in files):
1003 1003 # fast path -- all the values are known to be files, so just return
1004 1004 # that
1005 1005 return list(files)
1006 1006 return [f for f in dmap if match(f)]
@@ -1,2549 +1,2549
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import collections
10 10 import cStringIO, email, os, errno, re, posixpath, copy
11 11 import tempfile, zlib, shutil
12 12
13 13 from i18n import _
14 14 from node import hex, short
15 15 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
16 16 import pathutil
17 17
18 18 gitre = re.compile('diff --git a/(.*) b/(.*)')
19 19 tabsplitter = re.compile(r'(\t+|[^\t]+)')
20 20
21 21 class PatchError(Exception):
22 22 pass
23 23
24 24
25 25 # public functions
26 26
27 27 def split(stream):
28 28 '''return an iterator of individual patches from a stream'''
29 29 def isheader(line, inheader):
30 30 if inheader and line[0] in (' ', '\t'):
31 31 # continuation
32 32 return True
33 33 if line[0] in (' ', '-', '+'):
34 34 # diff line - don't check for header pattern in there
35 35 return False
36 36 l = line.split(': ', 1)
37 37 return len(l) == 2 and ' ' not in l[0]
38 38
39 39 def chunk(lines):
40 40 return cStringIO.StringIO(''.join(lines))
41 41
42 42 def hgsplit(stream, cur):
43 43 inheader = True
44 44
45 45 for line in stream:
46 46 if not line.strip():
47 47 inheader = False
48 48 if not inheader and line.startswith('# HG changeset patch'):
49 49 yield chunk(cur)
50 50 cur = []
51 51 inheader = True
52 52
53 53 cur.append(line)
54 54
55 55 if cur:
56 56 yield chunk(cur)
57 57
58 58 def mboxsplit(stream, cur):
59 59 for line in stream:
60 60 if line.startswith('From '):
61 61 for c in split(chunk(cur[1:])):
62 62 yield c
63 63 cur = []
64 64
65 65 cur.append(line)
66 66
67 67 if cur:
68 68 for c in split(chunk(cur[1:])):
69 69 yield c
70 70
71 71 def mimesplit(stream, cur):
72 72 def msgfp(m):
73 73 fp = cStringIO.StringIO()
74 74 g = email.Generator.Generator(fp, mangle_from_=False)
75 75 g.flatten(m)
76 76 fp.seek(0)
77 77 return fp
78 78
79 79 for line in stream:
80 80 cur.append(line)
81 81 c = chunk(cur)
82 82
83 83 m = email.Parser.Parser().parse(c)
84 84 if not m.is_multipart():
85 85 yield msgfp(m)
86 86 else:
87 87 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
88 88 for part in m.walk():
89 89 ct = part.get_content_type()
90 90 if ct not in ok_types:
91 91 continue
92 92 yield msgfp(part)
93 93
94 94 def headersplit(stream, cur):
95 95 inheader = False
96 96
97 97 for line in stream:
98 98 if not inheader and isheader(line, inheader):
99 99 yield chunk(cur)
100 100 cur = []
101 101 inheader = True
102 102 if inheader and not isheader(line, inheader):
103 103 inheader = False
104 104
105 105 cur.append(line)
106 106
107 107 if cur:
108 108 yield chunk(cur)
109 109
110 110 def remainder(cur):
111 111 yield chunk(cur)
112 112
113 113 class fiter(object):
114 114 def __init__(self, fp):
115 115 self.fp = fp
116 116
117 117 def __iter__(self):
118 118 return self
119 119
120 120 def next(self):
121 121 l = self.fp.readline()
122 122 if not l:
123 123 raise StopIteration
124 124 return l
125 125
126 126 inheader = False
127 127 cur = []
128 128
129 129 mimeheaders = ['content-type']
130 130
131 131 if not util.safehasattr(stream, 'next'):
132 132 # http responses, for example, have readline but not next
133 133 stream = fiter(stream)
134 134
135 135 for line in stream:
136 136 cur.append(line)
137 137 if line.startswith('# HG changeset patch'):
138 138 return hgsplit(stream, cur)
139 139 elif line.startswith('From '):
140 140 return mboxsplit(stream, cur)
141 141 elif isheader(line, inheader):
142 142 inheader = True
143 143 if line.split(':', 1)[0].lower() in mimeheaders:
144 144 # let email parser handle this
145 145 return mimesplit(stream, cur)
146 146 elif line.startswith('--- ') and inheader:
147 147 # No evil headers seen by diff start, split by hand
148 148 return headersplit(stream, cur)
149 149 # Not enough info, keep reading
150 150
151 151 # if we are here, we have a very plain patch
152 152 return remainder(cur)
153 153
154 154 def extract(ui, fileobj):
155 155 '''extract patch from data read from fileobj.
156 156
157 157 patch can be a normal patch or contained in an email message.
158 158
159 159 return tuple (filename, message, user, date, branch, node, p1, p2).
160 160 Any item in the returned tuple can be None. If filename is None,
161 161 fileobj did not contain a patch. Caller must unlink filename when done.'''
162 162
163 163 # attempt to detect the start of a patch
164 164 # (this heuristic is borrowed from quilt)
165 165 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
166 166 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
167 167 r'---[ \t].*?^\+\+\+[ \t]|'
168 168 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
169 169
170 170 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
171 171 tmpfp = os.fdopen(fd, 'w')
172 172 try:
173 173 msg = email.Parser.Parser().parse(fileobj)
174 174
175 175 subject = msg['Subject']
176 176 user = msg['From']
177 177 if not subject and not user:
178 178 # Not an email, restore parsed headers if any
179 179 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
180 180
181 181 # should try to parse msg['Date']
182 182 date = None
183 183 nodeid = None
184 184 branch = None
185 185 parents = []
186 186
187 187 if subject:
188 188 if subject.startswith('[PATCH'):
189 189 pend = subject.find(']')
190 190 if pend >= 0:
191 191 subject = subject[pend + 1:].lstrip()
192 192 subject = re.sub(r'\n[ \t]+', ' ', subject)
193 193 ui.debug('Subject: %s\n' % subject)
194 194 if user:
195 195 ui.debug('From: %s\n' % user)
196 196 diffs_seen = 0
197 197 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
198 198 message = ''
199 199 for part in msg.walk():
200 200 content_type = part.get_content_type()
201 201 ui.debug('Content-Type: %s\n' % content_type)
202 202 if content_type not in ok_types:
203 203 continue
204 204 payload = part.get_payload(decode=True)
205 205 m = diffre.search(payload)
206 206 if m:
207 207 hgpatch = False
208 208 hgpatchheader = False
209 209 ignoretext = False
210 210
211 211 ui.debug('found patch at byte %d\n' % m.start(0))
212 212 diffs_seen += 1
213 213 cfp = cStringIO.StringIO()
214 214 for line in payload[:m.start(0)].splitlines():
215 215 if line.startswith('# HG changeset patch') and not hgpatch:
216 216 ui.debug('patch generated by hg export\n')
217 217 hgpatch = True
218 218 hgpatchheader = True
219 219 # drop earlier commit message content
220 220 cfp.seek(0)
221 221 cfp.truncate()
222 222 subject = None
223 223 elif hgpatchheader:
224 224 if line.startswith('# User '):
225 225 user = line[7:]
226 226 ui.debug('From: %s\n' % user)
227 227 elif line.startswith("# Date "):
228 228 date = line[7:]
229 229 elif line.startswith("# Branch "):
230 230 branch = line[9:]
231 231 elif line.startswith("# Node ID "):
232 232 nodeid = line[10:]
233 233 elif line.startswith("# Parent "):
234 234 parents.append(line[9:].lstrip())
235 235 elif not line.startswith("# "):
236 236 hgpatchheader = False
237 237 elif line == '---':
238 238 ignoretext = True
239 239 if not hgpatchheader and not ignoretext:
240 240 cfp.write(line)
241 241 cfp.write('\n')
242 242 message = cfp.getvalue()
243 243 if tmpfp:
244 244 tmpfp.write(payload)
245 245 if not payload.endswith('\n'):
246 246 tmpfp.write('\n')
247 247 elif not diffs_seen and message and content_type == 'text/plain':
248 248 message += '\n' + payload
249 249 except: # re-raises
250 250 tmpfp.close()
251 251 os.unlink(tmpname)
252 252 raise
253 253
254 254 if subject and not message.startswith(subject):
255 255 message = '%s\n%s' % (subject, message)
256 256 tmpfp.close()
257 257 if not diffs_seen:
258 258 os.unlink(tmpname)
259 259 return None, message, user, date, branch, None, None, None
260 260
261 261 if parents:
262 262 p1 = parents.pop(0)
263 263 else:
264 264 p1 = None
265 265
266 266 if parents:
267 267 p2 = parents.pop(0)
268 268 else:
269 269 p2 = None
270 270
271 271 return tmpname, message, user, date, branch, nodeid, p1, p2
272 272
273 273 class patchmeta(object):
274 274 """Patched file metadata
275 275
276 276 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
277 277 or COPY. 'path' is patched file path. 'oldpath' is set to the
278 278 origin file when 'op' is either COPY or RENAME, None otherwise. If
279 279 file mode is changed, 'mode' is a tuple (islink, isexec) where
280 280 'islink' is True if the file is a symlink and 'isexec' is True if
281 281 the file is executable. Otherwise, 'mode' is None.
282 282 """
283 283 def __init__(self, path):
284 284 self.path = path
285 285 self.oldpath = None
286 286 self.mode = None
287 287 self.op = 'MODIFY'
288 288 self.binary = False
289 289
290 290 def setmode(self, mode):
291 islink = mode & 020000
292 isexec = mode & 0100
291 islink = mode & 0o20000
292 isexec = mode & 0o100
293 293 self.mode = (islink, isexec)
294 294
295 295 def copy(self):
296 296 other = patchmeta(self.path)
297 297 other.oldpath = self.oldpath
298 298 other.mode = self.mode
299 299 other.op = self.op
300 300 other.binary = self.binary
301 301 return other
302 302
303 303 def _ispatchinga(self, afile):
304 304 if afile == '/dev/null':
305 305 return self.op == 'ADD'
306 306 return afile == 'a/' + (self.oldpath or self.path)
307 307
308 308 def _ispatchingb(self, bfile):
309 309 if bfile == '/dev/null':
310 310 return self.op == 'DELETE'
311 311 return bfile == 'b/' + self.path
312 312
313 313 def ispatching(self, afile, bfile):
314 314 return self._ispatchinga(afile) and self._ispatchingb(bfile)
315 315
316 316 def __repr__(self):
317 317 return "<patchmeta %s %r>" % (self.op, self.path)
318 318
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Scan for "diff --git" headers; the extended header lines that may
    # follow each one refine the operation recorded in the current
    # patchmeta.  Everything after the first "--- " belongs to hunks.
    current = None
    results = []
    for rawline in lr:
        rawline = rawline.rstrip(' \r\n')
        if rawline.startswith('diff --git a/'):
            m = gitre.match(rawline)
            if m:
                if current:
                    results.append(current)
                current = patchmeta(m.group(2))
        elif current:
            if rawline.startswith('--- '):
                # hunk section starts: metadata for this file is complete
                results.append(current)
                current = None
                continue
            if rawline.startswith('rename from '):
                current.op = 'RENAME'
                current.oldpath = rawline[12:]
            elif rawline.startswith('rename to '):
                current.path = rawline[10:]
            elif rawline.startswith('copy from '):
                current.op = 'COPY'
                current.oldpath = rawline[10:]
            elif rawline.startswith('copy to '):
                current.path = rawline[8:]
            elif rawline.startswith('deleted file'):
                current.op = 'DELETE'
            elif rawline.startswith('new file mode '):
                current.op = 'ADD'
                current.setmode(int(rawline[-6:], 8))
            elif rawline.startswith('new mode '):
                current.setmode(int(rawline[-6:], 8))
            elif rawline.startswith('GIT binary patch'):
                current.binary = True
    if current:
        results.append(current)

    return results
362 362
class linereader(object):
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # a pushed-back line is served before anything read from fp
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()

    def __iter__(self):
        # yield lines until an empty string (EOF) is seen
        while True:
            line = self.readline()
            if not line:
                return
            yield line
386 386
class abstractbackend(object):
    """Interface every patch-application target (filesystem, working
    directory, repository store) must implement."""

    def __init__(self, ui):
        # ui is kept for warning/output methods in concrete subclasses
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. total is the number of hunks
        which failed to apply and total the total number of hunks for this
        files.
        """
        # deliberately a no-op by default: rejects are optional
        pass

    def exists(self, fname):
        # True if fname exists in the target; must be overridden
        raise NotImplementedError
418 418
419 419 class fsbackend(abstractbackend):
420 420 def __init__(self, ui, basedir):
421 421 super(fsbackend, self).__init__(ui)
422 422 self.opener = scmutil.opener(basedir)
423 423
424 424 def _join(self, f):
425 425 return os.path.join(self.opener.base, f)
426 426
427 427 def getfile(self, fname):
428 428 if self.opener.islink(fname):
429 429 return (self.opener.readlink(fname), (True, False))
430 430
431 431 isexec = False
432 432 try:
433 isexec = self.opener.lstat(fname).st_mode & 0100 != 0
433 isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
434 434 except OSError, e:
435 435 if e.errno != errno.ENOENT:
436 436 raise
437 437 try:
438 438 return (self.opener.read(fname), (False, isexec))
439 439 except IOError, e:
440 440 if e.errno != errno.ENOENT:
441 441 raise
442 442 return None, None
443 443
444 444 def setfile(self, fname, data, mode, copysource):
445 445 islink, isexec = mode
446 446 if data is None:
447 447 self.opener.setflags(fname, islink, isexec)
448 448 return
449 449 if islink:
450 450 self.opener.symlink(data, fname)
451 451 else:
452 452 self.opener.write(fname, data)
453 453 if isexec:
454 454 self.opener.setflags(fname, False, True)
455 455
456 456 def unlink(self, fname):
457 457 self.opener.unlinkpath(fname, ignoremissing=True)
458 458
459 459 def writerej(self, fname, failed, total, lines):
460 460 fname = fname + ".rej"
461 461 self.ui.warn(
462 462 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
463 463 (failed, total, fname))
464 464 fp = self.opener(fname, 'w')
465 465 fp.writelines(lines)
466 466 fp.close()
467 467
468 468 def exists(self, fname):
469 469 return self.opener.lexists(fname)
470 470
class workingbackend(fsbackend):
    """fsbackend variant that also keeps the repository dirstate in sync
    (copies, removals, renames) while patching the working directory.

    NOTE(review): close() depends on the exact ordering of dirstate
    operations, so the code is left untouched and only documented.
    """
    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        # forwarded to scmutil.marktouched() for rename detection;
        # presumably a 0-100 similarity percentage -- confirm with callers
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # refuse to patch a file that exists on disk but is untracked
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        # Flush accumulated copy/remove state into the dirstate and
        # return the sorted list of all touched files.
        wctx = self.repo[None]
        changed = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
        for f in self.removed:
            if f not in self.repo.dirstate:
                # File was deleted and no longer belongs to the
                # dirstate, it was probably marked added then
                # deleted, and should not be considered by
                # marktouched().
                changed.discard(f)
        if changed:
            scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)
514 514
class filestore(object):
    """Hold patched file contents, in memory while under a size budget
    and spilled to a temporary directory once the budget is exceeded."""

    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        self.maxsize = maxsize
        if self.maxsize is None:
            self.maxsize = 4*(2**20)
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        # negative maxsize means "never spill"
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        if self.opener is None:
            root = tempfile.mkdtemp(prefix='hg-patch-')
            self.opener = scmutil.opener(root)
        # Avoid filename issues with these simple names
        fn = str(self.created)
        self.opener.write(fn, data)
        self.created += 1
        self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        """Return (data, mode, copied); (None, None, None) if unknown."""
        try:
            return self.data[fname]
        except KeyError:
            pass
        if not self.opener or fname not in self.files:
            return None, None, None
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        # remove the spill directory, if one was ever created
        if self.opener:
            shutil.rmtree(self.opener.base)
551 551
class repobackend(abstractbackend):
    """Patch backend reading originals from a changectx and collecting
    the patched results into a filestore."""

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        # only files present in the base context may be patched
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            return None, None
        flags = fctx.flags()
        islink = 'l' in flags
        isexec = 'x' in flags
        return fctx.data(), (islink, isexec)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # mode-only change: carry the current content forward
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        # report everything this backend touched
        return self.changed | self.removed
593 593
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
# (raw strings: \d and \+ are not valid string escapes and only worked
# by accident as passthrough)
unidesc = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
contextdesc = re.compile(r'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# line-ending normalization modes accepted by patchfile
eolmodes = ['strict', 'crlf', 'lf', 'auto']
598 598
class patchfile(object):
    """State for applying the hunks of one patched file via a backend.

    NOTE(review): apply() is sensitive to the interplay of 'offset'
    (line drift from already-applied hunks) and 'skew' (drift found by
    fuzzy matching); code intentionally left byte-identical, comments
    only.
    """
    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        # copies/renames read their base content from the store, plain
        # patches from the backend
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        # line-content -> line-number index, built lazily by apply()
        self.hash = {}
        self.dirty = 0
        self.offset = 0
        self.skew = 0
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        # re-apply the requested EOL convention before handing the
        # content to the backend
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l[-1] == '\n':
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        # announce the file once, escalating to a warning if requested
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply a single hunk; return -1 on rejection, 0 on a clean
        apply, or the fuzz level used for a fuzzy apply."""
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
                            h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace (or delete) the file wholesale
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if (self.skew == 0 and
            diffhelpers.testhunk(old, self.lines, oldstart) == 0):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
            self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in xrange(self.ui.configint("patch", "fuzz", 2) + 1):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        # flush pending changes and rejects; return the reject count
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
813 813
class header(object):
    """patch header
    """
    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
    diff_re = re.compile('diff -r .* (.*)$')
    allhunks_re = re.compile('(?:index|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|deleted|copy|rename) ')
    newfile_re = re.compile('(?:new file)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        # an "index" line marks a binary (all-or-nothing) change
        for line in self.header:
            if line.startswith('index '):
                return True
        return False

    def pretty(self, fp):
        for line in self.header:
            if line.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(line):
                fp.write(line)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                break
            if line.startswith('---'):
                fp.write(_('%d hunks, %d lines changed\n') %
                         (len(self.hunks),
                          sum([max(h.added, h.removed) for h in self.hunks])))
                break
            fp.write(line)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        for line in self.header:
            if self.allhunks_re.match(line):
                return True
        return False

    def files(self):
        # git headers name both sides; plain "diff -r" only one
        gitmatch = self.diffgit_re.match(self.header[0])
        if not gitmatch:
            return self.diff_re.match(self.header[0]).groups()
        fromfile, tofile = gitmatch.groups()
        if fromfile == tofile:
            return [fromfile]
        return [fromfile, tofile]

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % (' '.join(map(repr, self.files())))

    def isnewfile(self):
        for line in self.header:
            if self.newfile_re.match(line):
                return True
        return False

    def special(self):
        # Special files are shown only at the header level and not at the hunk
        # level for example a file that has been deleted is a special file.
        # The user cannot change the content of the operation, in the case of
        # the deleted file he has to take the deletion or not take it, he
        # cannot take some of it.
        # Newly added files are special if they are empty, they are not special
        # if they have some content as we want to be able to change it
        nocontent = len(self.header) == 2
        emptynewfile = self.isnewfile() and nocontent
        if emptynewfile:
            return True
        for line in self.header:
            if self.special_re.match(line):
                return True
        return False
885 885
class recordhunk(object):
    """patch hunk

    XXX shouldn't we merge this with the other hunk class?
    """
    maxcontext = 3

    def __init__(self, header, fromline, toline, proc, before, hunk, after):
        def trimcontext(lineno, lines):
            delta = len(lines) - self.maxcontext
            # NB: trimming is currently disabled on purpose ("if False")
            if False and delta > 0:
                return lineno + delta, lines[:self.maxcontext]
            return lineno, lines

        self.header = header
        self.fromline, self.before = trimcontext(fromline, before)
        self.toline, self.after = trimcontext(toline, after)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def __eq__(self, v):
        if not isinstance(v, recordhunk):
            return False

        return ((v.hunk == self.hunk) and
                (v.proc == self.proc) and
                (self.fromline == v.fromline) and
                (self.header.files() == v.header.files()))

    def __hash__(self):
        return hash((tuple(self.hunk),
            tuple(self.header.files()),
            self.fromline,
            self.proc))

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        add = sum(1 for line in hunk if line[0] == '+')
        rem = sum(1 for line in hunk if line[0] == '-')
        return add, rem

    def write(self, fp):
        # hunk lengths count context lines, minus any no-eol marker
        delta = len(self.before) + len(self.after)
        if self.after and self.after[-1] == '\\ No newline at end of file\n':
            delta -= 1
        fromlen = delta + self.removed
        tolen = delta + self.added
        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
                 (self.fromline, fromlen, self.toline, tolen,
                  self.proc and (' ' + self.proc)))
        fp.write(''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
946 946
def filterpatch(ui, headers, operation=None):
    """Interactively filter patch chunks into applied-only chunks

    Walks every header/hunk, prompting the user for each one, and
    returns the flat list of selected headers and hunks.

    NOTE(review): the nested prompt() mutates shared skip-state and the
    main loop tracks line-offset corrections ('fixoffset'); code left
    byte-identical, comments only.
    """
    if operation is None:
        operation = _('record')

    def prompt(skipfile, skipall, query, chunk):
        """prompt query, and process base inputs

        - y/n for the rest of file
        - y/n for the rest
        - ? (help)
        - q (quit)

        Return True/False and possibly updated skipfile and skipall.
        """
        newpatches = None
        # earlier "all"/"file" answers short-circuit further prompting
        if skipall is not None:
            return skipall, skipfile, skipall, newpatches
        if skipfile is not None:
            return skipfile, skipfile, skipall, newpatches
        while True:
            resps = _('[Ynesfdaq?]'
                      '$$ &Yes, record this change'
                      '$$ &No, skip this change'
                      '$$ &Edit this change manually'
                      '$$ &Skip remaining changes to this file'
                      '$$ Record remaining changes to this &file'
                      '$$ &Done, skip remaining changes and files'
                      '$$ Record &all changes to all remaining files'
                      '$$ &Quit, recording no changes'
                      '$$ &? (display help)')
            r = ui.promptchoice("%s %s" % (query, resps))
            ui.write("\n")
            if r == 8: # ?
                for c, t in ui.extractchoices(resps)[1]:
                    ui.write('%s - %s\n' % (c, t.lower()))
                continue
            elif r == 0: # yes
                ret = True
            elif r == 1: # no
                ret = False
            elif r == 2: # Edit patch
                if chunk is None:
                    ui.write(_('cannot edit patch for whole file'))
                    ui.write("\n")
                    continue
                if chunk.header.binary():
                    ui.write(_('cannot edit patch for binary file'))
                    ui.write("\n")
                    continue
                # Patch comment based on the Git one (based on comment at end of
                # http://mercurial.selenic.com/wiki/RecordExtension)
                phelp = '---' + _("""
To remove '-' lines, make them ' ' lines (context).
To remove '+' lines, delete them.
Lines starting with # will be removed from the patch.

If the patch applies cleanly, the edited hunk will immediately be
added to the record list. If it does not apply cleanly, a rejects
file will be generated: you can use that when you try again. If
all lines of the hunk are removed, then the edit is aborted and
the hunk is left unchanged.
""")
                (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
                        suffix=".diff", text=True)
                ncpatchfp = None
                try:
                    # Write the initial patch
                    f = os.fdopen(patchfd, "w")
                    chunk.header.write(f)
                    chunk.write(f)
                    f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
                    f.close()
                    # Start the editor and wait for it to complete
                    editor = ui.geteditor()
                    ret = ui.system("%s \"%s\"" % (editor, patchfn),
                                    environ={'HGUSER': ui.username()})
                    if ret != 0:
                        ui.warn(_("editor exited with exit code %d\n") % ret)
                        continue
                    # Remove comment lines
                    patchfp = open(patchfn)
                    ncpatchfp = cStringIO.StringIO()
                    for line in patchfp:
                        if not line.startswith('#'):
                            ncpatchfp.write(line)
                    patchfp.close()
                    ncpatchfp.seek(0)
                    newpatches = parsepatch(ncpatchfp)
                finally:
                    os.unlink(patchfn)
                    del ncpatchfp
                # Signal that the chunk shouldn't be applied as-is, but
                # provide the new patch to be used instead.
                ret = False
            elif r == 3: # Skip
                ret = skipfile = False
            elif r == 4: # file (Record remaining)
                ret = skipfile = True
            elif r == 5: # done, skip remaining
                ret = skipall = False
            elif r == 6: # all
                ret = skipall = True
            elif r == 7: # quit
                raise util.Abort(_('user quit'))
            return ret, skipfile, skipall, newpatches

    seen = set()
    applied = {}        # 'filename' -> [] of chunks
    skipfile, skipall = None, None
    pos, total = 1, sum(len(h.hunks) for h in headers)
    for h in headers:
        pos += len(h.hunks)
        skipfile = None
        fixoffset = 0
        hdr = ''.join(h.header)
        if hdr in seen:
            continue
        seen.add(hdr)
        if skipall is None:
            h.pretty(ui)
            msg = (_('examine changes to %s?') %
                   _(' and ').join("'%s'" % f for f in h.files()))
            r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
            if not r:
                continue
        applied[h.filename()] = [h]
        if h.allhunks():
            # binary/all-or-nothing: take every hunk without asking
            applied[h.filename()] += h.hunks
            continue
        for i, chunk in enumerate(h.hunks):
            if skipfile is None and skipall is None:
                chunk.pretty(ui)
            if total == 1:
                msg = _("record this change to '%s'?") % chunk.filename()
            else:
                idx = pos - len(h.hunks) + i
                msg = _("record change %d/%d to '%s'?") % (idx, total,
                                                           chunk.filename())
            r, skipfile, skipall, newpatches = prompt(skipfile,
                    skipall, msg, chunk)
            if r:
                if fixoffset:
                    chunk = copy.copy(chunk)
                    chunk.toline += fixoffset
                applied[chunk.filename()].append(chunk)
            elif newpatches is not None:
                for newpatch in newpatches:
                    for newhunk in newpatch.hunks:
                        if fixoffset:
                            newhunk.toline += fixoffset
                        applied[newhunk.filename()].append(newhunk)
            else:
                # skipped hunk: later hunks of this file shift accordingly
                fixoffset += chunk.removed - chunk.added
    return sum([h for h in applied.itervalues()
               if h[0].special() or len(h) > 1], [])
class hunk(object):
    """A single parsed text hunk (unified or context format).

    'a' holds the old-side lines, 'b' the new-side lines, 'hunk' the raw
    hunk text including the '@@' descriptor.

    NOTE(review): the context-format parser interleaves reads with
    linereader pushback and no-eol fixups; code left byte-identical,
    comments only.
    """
    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        # lr is None when a caller (getnormalized) rebuilds fields manually
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        # parse "@@ -start,len +start,len @@"; a missing len means 1
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        # old-side descriptor: "*** start,end ****"
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # "\ No newline at end of file": strip the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # merge the new-side line into the combined hunk, skipping
            # old-side removals already present
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        # consume a trailing "\ No newline at end of file" marker, if any
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        # parsed line counts must match the declared lengths
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        # return (old, oldstart, new, newstart) trimmed by 'fuzz' context
        # lines, with starts converted to 0-based indices
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1319 1319
class binhunk(object):
    """A "GIT binary patch" hunk, holding either the literal new file
    content or a binary delta against the previous content."""
    def __init__(self, lr, fname):
        # decoded payload; stays None until _read() succeeds
        self.text = None
        # True when the payload is a delta (applied via applybindelta)
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self, lines):
        # Return the new file content as a single-element list; for a
        # delta, 'lines' carries the base text the delta applies to.
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        def getline(lr, hunk):
            # read one raw line, record it in the hunk, return it stripped
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        size = 0
        # scan forward to the "literal <size>" or "delta <size>" header
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # the first character encodes the decoded length of the line:
            # 'A'-'Z' -> 1-26, 'a'-'z' -> 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError, e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        # the base85 payload is zlib-compressed; the declared size lets us
        # sanity-check the decompressed result
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1375 1375
def parsefilename(str):
    # --- filename \t|space stuff
    # The filename ends at the first tab if there is one, otherwise at
    # the first space; with neither, the whole remainder is the name.
    s = str[4:].rstrip('\r\n')
    for sep in '\t ':
        cut = s.find(sep)
        if cut >= 0:
            return s[:cut]
    return s
1385 1385
def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument

    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = """diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ...  c
    ...  1
    ...  2
    ... + 3
    ... -4
    ...  5
    ...  d
    ... +lastline"""
    >>> hunks = parsepatch(rawpatch)
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...     hunkscomingfromfilterpatch.extend(h.hunks)

    >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
    >>> fp = cStringIO.StringIO()
    >>> for c in reversedhunks:
    ...      c.write(fp)
    >>> fp.seek(0)
    >>> reversedpatch = fp.read()
    >>> print reversedpatch
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -1,4 +1,3 @@
    -firstline
     c
     1
     2
    @@ -1,6 +2,6 @@
     c
     1
     2
    - 3
    +4
     5
     d
    @@ -5,3 +6,2 @@
     5
     d
    -lastline

    '''

    # imported here rather than at module level to avoid a circular import
    # at load time -- TODO confirm against module import graph
    import crecord as crecordmod
    newhunks = []
    for c in hunks:
        if isinstance(c, crecordmod.uihunk):
            # curses hunks encapsulate the record hunk in _hunk
            c = c._hunk
        if isinstance(c, recordhunk):
            # NOTE: the sign of each hunk line is flipped in place, so the
            # hunk objects passed in are modified by this call
            for j, line in enumerate(c.hunk):
                if line.startswith("-"):
                    c.hunk[j] = "+" + c.hunk[j][1:]
                elif line.startswith("+"):
                    c.hunk[j] = "-" + c.hunk[j][1:]
            c.added, c.removed = c.removed, c.added
        # headers and any other entries pass through unchanged
        newhunks.append(c)
    return newhunks
1456 1456
def parsepatch(originalchunks):
    """patch -> [] of headers -> [] of hunks """
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            self.fromline = 0
            self.toline = 0
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            # remember the -start/+start of an @@ line; the lengths and
            # any trailing function name ('proc') ride along
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # context ends the pending hunk (if any): flush it into the
            # current header, advancing the line counters past it
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                               self.proc, self.before, self.hunk, context)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
                self.proc = ''
            self.context = context

        def addhunk(self, hunk):
            # context seen just before a hunk becomes its leading context
            if self.context:
                self.before = self.context
                self.context = []
            self.hunk = hunk

        def newfile(self, hdr):
            # flush any pending hunk, then start a fresh header
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            self.addcontext([])
            return self.headers

        # state transition table: maps current state -> event -> handler.
        # The values are plain functions from the class body, so the
        # driver below calls them with the parser instance explicitly.
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
            }

    p = parser()
    fp = cStringIO.StringIO()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    # feed scanpatch events through the transition table; a missing
    # entry means the patch is malformed
    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1540 1540
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform('a/b/c', 0, '')
    ('', 'a/b/c')
    >>> pathtransform('   a/b/c   ', 0, '')
    ('', '   a/b/c')
    >>> pathtransform('   a/b/c   ', 2, '')
    ('a/b/', 'c')
    >>> pathtransform('a/b/c', 0, 'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform('   a//b/c   ', 2, 'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform('a/b/c', 3, '')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    if strip == 0:
        return '', prefix + path.rstrip()
    end = len(path)
    pos = 0
    # walk forward over 'strip' path components
    for remaining in range(strip, 0, -1):
        pos = path.find('/', pos)
        if pos == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (remaining, strip, path))
        pos += 1
        # consume '//' in the path
        while pos < end - 1 and path[pos] == '/':
            pos += 1
    return path[:pos].lstrip(), prefix + path[pos:].rstrip()
1578 1578
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    """Build a patchmeta for a plain (non-git) hunk: pick which of the
    two diff-header paths to patch and flag creations/deletions."""
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    # when both sides name the same file, skip the second existence check
    goodb = gooda if afile == bfile else (not nullb and
                                          backend.exists(bfile))
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and hunk.starta == 0 and hunk.lena == 0
        and abasedir == bbasedir and afile.startswith(bfile)):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            fname = afile if isbackup else bfile
        elif gooda:
            fname = afile

    if not fname:
        if not nullb:
            fname = afile if isbackup else bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1633 1633
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def gather(first, keep):
        """accumulate lines from lr for as long as keep() accepts them"""
        collected = [first]
        line = lr.readline()
        while line and keep(line):
            collected.append(line)
            line = lr.readline()
        if line:
            # the rejected line belongs to the next event
            lr.push(line)
        return collected

    while True:
        line = lr.readline()
        if not line:
            break
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            def notheader(l):
                parts = l.split(None, 1)
                return not parts or parts[0] not in ('---', 'diff')
            header = gather(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                lr.push(fromfile)
            yield 'file', header
        elif line[0] == ' ':
            yield 'context', gather(line, lambda l: l[0] in ' \\')
        elif line[0] in '-+':
            yield 'hunk', gather(line, lambda l: l[0] in '-+\\')
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1685 1685
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    startpos = 0
    try:
        startpos = lr.fp.tell()
        scanfp = lr.fp
    except IOError:
        # unseekable input: slurp everything into an in-memory buffer
        scanfp = cStringIO.StringIO(lr.fp.read())
    gitlr = linereader(scanfp)
    gitlr.push(firstline)
    gitpatches = readgitpatch(gitlr)
    # rewind so the caller can re-read the patch from where it started
    scanfp.seek(startpos)
    return gitpatches
1711 1711
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # tri-state: None = diff style unknown yet, False = unified diff,
    # True = context diff
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            gp = None
            # gitpatches is kept reversed, so the next expected entry is
            # at the end of the list
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                # first hunk for this file: announce the file first
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # flush metadata-only entries (e.g. mode changes, renames
            # without hunks) that precede the current file
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # emit any remaining metadata-only git entries at end of input
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1810 1810
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c
    """
    def deltahead(binchunk):
        # a delta header varint is a run of bytes whose last byte has
        # the high bit clear; return how many bytes it occupies
        i = 0
        for c in binchunk:
            i += 1
            if not (ord(c) & 0x80):
                return i
        return i
    # skip the two varints encoding source and destination lengths
    binchunk = binchunk[deltahead(binchunk):]
    binchunk = binchunk[deltahead(binchunk):]
    pieces = []
    pos = 0
    end = len(binchunk)
    while pos < end:
        cmd = ord(binchunk[pos])
        pos += 1
        if cmd & 0x80:
            # copy opcode: bits 0-3 select which offset bytes follow,
            # bits 4-6 which size bytes (little-endian assembly)
            offset = 0
            size = 0
            shift = 0
            for bit in (0x01, 0x02, 0x04, 0x08):
                if cmd & bit:
                    offset |= ord(binchunk[pos]) << shift
                    pos += 1
                shift += 8
            shift = 0
            for bit in (0x10, 0x20, 0x40):
                if cmd & bit:
                    size |= ord(binchunk[pos]) << shift
                    pos += 1
                shift += 8
            if size == 0:
                # zero size encodes the maximum copy length
                size = 0x10000
            pieces.append(data[offset:offset + size])
        elif cmd != 0:
            # literal insert: cmd itself is the number of bytes that follow
            pieces.append(binchunk[pos:pos + cmd])
            pos += cmd
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return ''.join(pieces)
1866 1866
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    # thin wrapper: _applydiff does the work, parameterized with the
    # regular patchfile implementation
    return _applydiff(ui, fp, patchfile, backend, store,
                      strip=strip, prefix=prefix, eolmode=eolmode)
1879 1879
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    # Drive the hunk stream from iterhunks(fp) through 'patcher',
    # writing results via 'backend'. Returns 0 on a clean apply, 1 if
    # any hunk applied with fuzz, -1 if any hunk was rejected (mirrors
    # the contract documented on applydiff).

    if prefix:
        # normalize the target prefix against the repo root/cwd
        prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
                                    prefix)
        if prefix != '':
            prefix += '/'
    def pstrip(p):
        # strip-1 because iterhunks paths keep their a/ or b/ component
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                # the file event failed earlier; skip its hunks
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # finish the previous file before switching targets
            if current_file:
                rejects += current_file.close()
                current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only entry: delete, copy/rename or mode change
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    # FIXME: failing getfile has never been handled here
                    assert data is not None
                if gp.mode:
                    mode = gp.mode
                if gp.op == 'ADD':
                    # Added files without content have no hunk and
                    # must be created
                    data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError, inst:
                # a file that cannot be patched counts as one reject
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # preserve copy/rename sources in the store before they are
            # modified by later hunks (see scangitpatch)
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1967 1967
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    # Initialize defensively: some patchers can emit a fuzz/FAILED line
    # before any "patching file" line, which previously raised an
    # unbound-local NameError on 'pf'/'printed_file'. Fall back to the
    # patch name when no file has been announced yet.
    pf = patchname
    printed_file = False
    try:
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        if files:
            scmutil.marktouched(repo, files, similarity)
        # popen's close() returns the exit status (None on success)
        code = fp.close()
        if code:
            raise PatchError(_("patch command failed: %s") %
                             util.explainexit(code)[0])
    return fuzz
2009 2009
def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
                 eolmode='strict'):
    """Apply 'patchobj' (a path or an open file object) through the given
    backend. Returns whether any hunk applied with fuzz; raises PatchError
    when the patch failed to apply. 'files' collects the touched paths."""
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    if eolmode.lower() not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        patchf = open(patchobj, 'rb')
    except TypeError:
        # not a path: assume patchobj is already a file-like object
        patchf = patchobj
    try:
        rc = applydiff(ui, patchf, backend, store, strip=strip,
                       prefix=prefix, eolmode=eolmode)
    finally:
        if patchf != patchobj:
            patchf.close()
        files.update(backend.close())
        store.close()
    if rc < 0:
        raise PatchError(_('patch failed to apply'))
    return rc > 0
2036 2036
def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
                  eolmode='strict', similarity=0):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    return patchbackend(ui, workingbackend(ui, repo, similarity), patchobj,
                        strip, prefix, files, eolmode)
2043 2043
def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
              eolmode='strict'):
    """Apply <patchobj> against revision data through a repobackend."""
    return patchbackend(ui, repobackend(ui, repo, ctx, store), patchobj,
                        strip, prefix, files, eolmode)
2048 2048
def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
          similarity=0):
    """Apply <patchname> to the working directory.

    'eolmode' specifies how end of lines should be handled. It can be:
    - 'strict': inputs are read in binary mode, EOLs are preserved
    - 'crlf': EOLs are ignored when patching and reset to CRLF
    - 'lf': EOLs are ignored when patching and reset to LF
    - None: get it from user settings, default to 'strict'
    'eolmode' is ignored when using an external patcher program.

    Returns whether patch was applied with fuzz factor.
    """
    if files is None:
        files = set()
    # a configured external patch program takes precedence over the
    # builtin implementation
    patcher = ui.config('ui', 'patch')
    if patcher:
        return _externalpatch(ui, repo, patcher, patchname, strip,
                              files, similarity)
    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
                         similarity)
2070 2070
def changedfiles(ui, repo, patchpath, strip=1):
    """Return the set of repository paths touched by the patch at
    patchpath (rename sources included)."""
    backend = fsbackend(ui, repo.root)
    patchf = open(patchpath, 'rb')
    try:
        touched = set()
        for state, values in iterhunks(patchf):
            if state == 'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    # paths from git metadata keep their a/-b/ component
                    gp.path = pathtransform(gp.path, strip - 1, '')[1]
                    if gp.oldpath:
                        gp.oldpath = pathtransform(gp.oldpath, strip - 1,
                                                   '')[1]
                else:
                    gp = makepatchmeta(backend, afile, bfile, first_hunk,
                                       strip, '')
                touched.add(gp.path)
                if gp.op == 'RENAME':
                    touched.add(gp.oldpath)
            elif state not in ('hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
        return touched
    finally:
        patchf.close()
2094 2094
class GitDiffRequired(Exception):
    # raised while generating a plain diff when a change can only be
    # represented in git format; the caller retries with git diffs
    # enabled (see diff())
    pass
2097 2097
def diffallopts(ui, opts=None, untrusted=False, section='diff'):
    '''return diffopts with all features supported and parsed'''
    # opt in to every feature group difffeatureopts understands
    return difffeatureopts(ui, opts=opts, untrusted=untrusted,
                           section=section, git=True, whitespace=True,
                           formatchanging=True)
2102 2102
# 'diffopts' is an alias of diffallopts, kept so existing callers of the
# older name keep working
diffopts = diffallopts
2104 2104
def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
                    whitespace=False, formatchanging=False):
    '''return diffopts with only opted-in features parsed

    Features:
    - git: git-style diffs
    - whitespace: whitespace options like ignoreblanklines and ignorews
    - formatchanging: options that will likely break or cause correctness issues
      with most diff parsers
    '''
    def fetch(key, name=None, getter=ui.configbool, forceplain=None):
        # precedence: explicit command line option, then HGPLAIN override
        # (when requested), then the configuration file
        if opts:
            cmdval = opts.get(key)
            if cmdval:
                return cmdval
        if forceplain is not None and ui.plain():
            return forceplain
        return getter(section, name or key, None, untrusted=untrusted)

    # core options, expected to be understood by every diff parser
    buildopts = {
        'nodates': fetch('nodates'),
        'showfunc': fetch('show_function', 'showfunc'),
        'context': fetch('unified', getter=ui.config),
    }

    if git:
        buildopts['git'] = fetch('git')
    if whitespace:
        # (command line option name, diffopts/config name) pairs
        for optkey, optname in [('ignore_all_space', 'ignorews'),
                                ('ignore_space_change', 'ignorewsamount'),
                                ('ignore_blank_lines', 'ignoreblanklines')]:
            buildopts[optname] = fetch(optkey, optname)
    if formatchanging:
        buildopts['text'] = opts and opts.get('text')
        buildopts['nobinary'] = fetch('nobinary')
        buildopts['noprefix'] = fetch('noprefix', forceplain=False)

    return mdiff.diffopts(**buildopts)
2145 2145
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix='', relroot=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).

    relroot, if not empty, must be normalized with a trailing /. Any match
    patterns that fall outside it will be ignored.'''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        # small LRU-style cache of filelogs keyed by path (evicts the
        # oldest entry once more than 20 are held)
        cache = {}
        order = collections.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    relfiltered = False
    if relroot != '' and match.always():
        # as a special case, create a new matcher with just the relroot
        pats = [relroot]
        match = scmutil.match(ctx2, pats, default='path')
        relfiltered = True

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    if repo.ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]

    # copy/rename information is only needed for git diffs, or to decide
    # whether an upgrade to git format is required
    copy = {}
    if opts.git or opts.upgrade:
        copy = copies.pathcopies(ctx1, ctx2, match=match)

    if relroot is not None:
        if not relfiltered:
            # XXX this would ideally be done in the matcher, but that is
            # generally meant to 'or' patterns, not 'and' them. In this case we
            # need to 'and' all the patterns from the matcher with relroot.
            def filterrel(l):
                return [f for f in l if f.startswith(relroot)]
            modified = filterrel(modified)
            added = filterrel(added)
            removed = filterrel(removed)
            relfiltered = True
        # filter out copies where either side isn't inside the relative root
        copy = dict(((dst, src) for (dst, src) in copy.iteritems()
                     if dst.startswith(relroot)
                     and src.startswith(relroot)))

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix, relroot)
    if opts.upgrade and not opts.git:
        # try a plain diff first; fall back to git format if some change
        # cannot be represented and losedatafn does not accept the loss
        try:
            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
2247 2247
2248 2248 def difflabel(func, *args, **kw):
2249 2249 '''yields 2-tuples of (output, label) based on the output of func()'''
2250 2250 headprefixes = [('diff', 'diff.diffline'),
2251 2251 ('copy', 'diff.extended'),
2252 2252 ('rename', 'diff.extended'),
2253 2253 ('old', 'diff.extended'),
2254 2254 ('new', 'diff.extended'),
2255 2255 ('deleted', 'diff.extended'),
2256 2256 ('---', 'diff.file_a'),
2257 2257 ('+++', 'diff.file_b')]
2258 2258 textprefixes = [('@', 'diff.hunk'),
2259 2259 ('-', 'diff.deleted'),
2260 2260 ('+', 'diff.inserted')]
2261 2261 head = False
2262 2262 for chunk in func(*args, **kw):
2263 2263 lines = chunk.split('\n')
2264 2264 for i, line in enumerate(lines):
2265 2265 if i != 0:
2266 2266 yield ('\n', '')
2267 2267 if head:
2268 2268 if line.startswith('@'):
2269 2269 head = False
2270 2270 else:
2271 2271 if line and line[0] not in ' +-@\\':
2272 2272 head = True
2273 2273 stripline = line
2274 2274 diffline = False
2275 2275 if not head and line and line[0] in '+-':
2276 2276 # highlight tabs and trailing whitespace, but only in
2277 2277 # changed lines
2278 2278 stripline = line.rstrip()
2279 2279 diffline = True
2280 2280
2281 2281 prefixes = textprefixes
2282 2282 if head:
2283 2283 prefixes = headprefixes
2284 2284 for prefix, label in prefixes:
2285 2285 if stripline.startswith(prefix):
2286 2286 if diffline:
2287 2287 for token in tabsplitter.findall(stripline):
2288 2288 if '\t' == token[0]:
2289 2289 yield (token, 'diff.tab')
2290 2290 else:
2291 2291 yield (token, label)
2292 2292 else:
2293 2293 yield (stripline, label)
2294 2294 break
2295 2295 else:
2296 2296 yield (line, '')
2297 2297 if line != stripline:
2298 2298 yield (line[len(stripline):], 'diff.trailingwhitespace')
2299 2299
2300 2300 def diffui(*args, **kw):
2301 2301 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2302 2302 return difflabel(diff, *args, **kw)
2303 2303
2304 2304 def _filepairs(ctx1, modified, added, removed, copy, opts):
2305 2305 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2306 2306 before and f2 is the the name after. For added files, f1 will be None,
2307 2307 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2308 2308 or 'rename' (the latter two only if opts.git is set).'''
2309 2309 gone = set()
2310 2310
2311 2311 copyto = dict([(v, k) for k, v in copy.items()])
2312 2312
2313 2313 addedset, removedset = set(added), set(removed)
2314 2314 # Fix up added, since merged-in additions appear as
2315 2315 # modifications during merges
2316 2316 for f in modified:
2317 2317 if f not in ctx1:
2318 2318 addedset.add(f)
2319 2319
2320 2320 for f in sorted(modified + added + removed):
2321 2321 copyop = None
2322 2322 f1, f2 = f, f
2323 2323 if f in addedset:
2324 2324 f1 = None
2325 2325 if f in copy:
2326 2326 if opts.git:
2327 2327 f1 = copy[f]
2328 2328 if f1 in removedset and f1 not in gone:
2329 2329 copyop = 'rename'
2330 2330 gone.add(f1)
2331 2331 else:
2332 2332 copyop = 'copy'
2333 2333 elif f in removedset:
2334 2334 f2 = None
2335 2335 if opts.git:
2336 2336 # have we already reported a copy above?
2337 2337 if (f in copyto and copyto[f] in addedset
2338 2338 and copy[copyto[f]] == f):
2339 2339 continue
2340 2340 yield f1, f2, copyop
2341 2341
2342 2342 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2343 2343 copy, getfilectx, opts, losedatafn, prefix, relroot):
2344 2344 '''given input data, generate a diff and yield it in blocks
2345 2345
2346 2346 If generating a diff would lose data like flags or binary data and
2347 2347 losedatafn is not None, it will be called.
2348 2348
2349 2349 relroot is removed and prefix is added to every path in the diff output.
2350 2350
2351 2351 If relroot is not empty, this function expects every path in modified,
2352 2352 added, removed and copy to start with it.'''
2353 2353
2354 2354 def gitindex(text):
2355 2355 if not text:
2356 2356 text = ""
2357 2357 l = len(text)
2358 2358 s = util.sha1('blob %d\0' % l)
2359 2359 s.update(text)
2360 2360 return s.hexdigest()
2361 2361
2362 2362 if opts.noprefix:
2363 2363 aprefix = bprefix = ''
2364 2364 else:
2365 2365 aprefix = 'a/'
2366 2366 bprefix = 'b/'
2367 2367
2368 2368 def diffline(f, revs):
2369 2369 revinfo = ' '.join(["-r %s" % rev for rev in revs])
2370 2370 return 'diff %s %s' % (revinfo, f)
2371 2371
2372 2372 date1 = util.datestr(ctx1.date())
2373 2373 date2 = util.datestr(ctx2.date())
2374 2374
2375 2375 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
2376 2376
2377 2377 if relroot != '' and (repo.ui.configbool('devel', 'all')
2378 2378 or repo.ui.configbool('devel', 'check-relroot')):
2379 2379 for f in modified + added + removed + copy.keys() + copy.values():
2380 2380 if f is not None and not f.startswith(relroot):
2381 2381 raise AssertionError(
2382 2382 "file %s doesn't start with relroot %s" % (f, relroot))
2383 2383
2384 2384 for f1, f2, copyop in _filepairs(
2385 2385 ctx1, modified, added, removed, copy, opts):
2386 2386 content1 = None
2387 2387 content2 = None
2388 2388 flag1 = None
2389 2389 flag2 = None
2390 2390 if f1:
2391 2391 content1 = getfilectx(f1, ctx1).data()
2392 2392 if opts.git or losedatafn:
2393 2393 flag1 = ctx1.flags(f1)
2394 2394 if f2:
2395 2395 content2 = getfilectx(f2, ctx2).data()
2396 2396 if opts.git or losedatafn:
2397 2397 flag2 = ctx2.flags(f2)
2398 2398 binary = False
2399 2399 if opts.git or losedatafn:
2400 2400 binary = util.binary(content1) or util.binary(content2)
2401 2401
2402 2402 if losedatafn and not opts.git:
2403 2403 if (binary or
2404 2404 # copy/rename
2405 2405 f2 in copy or
2406 2406 # empty file creation
2407 2407 (not f1 and not content2) or
2408 2408 # empty file deletion
2409 2409 (not content1 and not f2) or
2410 2410 # create with flags
2411 2411 (not f1 and flag2) or
2412 2412 # change flags
2413 2413 (f1 and f2 and flag1 != flag2)):
2414 2414 losedatafn(f2 or f1)
2415 2415
2416 2416 path1 = f1 or f2
2417 2417 path2 = f2 or f1
2418 2418 path1 = posixpath.join(prefix, path1[len(relroot):])
2419 2419 path2 = posixpath.join(prefix, path2[len(relroot):])
2420 2420 header = []
2421 2421 if opts.git:
2422 2422 header.append('diff --git %s%s %s%s' %
2423 2423 (aprefix, path1, bprefix, path2))
2424 2424 if not f1: # added
2425 2425 header.append('new file mode %s' % gitmode[flag2])
2426 2426 elif not f2: # removed
2427 2427 header.append('deleted file mode %s' % gitmode[flag1])
2428 2428 else: # modified/copied/renamed
2429 2429 mode1, mode2 = gitmode[flag1], gitmode[flag2]
2430 2430 if mode1 != mode2:
2431 2431 header.append('old mode %s' % mode1)
2432 2432 header.append('new mode %s' % mode2)
2433 2433 if copyop is not None:
2434 2434 header.append('%s from %s' % (copyop, path1))
2435 2435 header.append('%s to %s' % (copyop, path2))
2436 2436 elif revs and not repo.ui.quiet:
2437 2437 header.append(diffline(path1, revs))
2438 2438
2439 2439 if binary and opts.git and not opts.nobinary:
2440 2440 text = mdiff.b85diff(content1, content2)
2441 2441 if text:
2442 2442 header.append('index %s..%s' %
2443 2443 (gitindex(content1), gitindex(content2)))
2444 2444 else:
2445 2445 text = mdiff.unidiff(content1, date1,
2446 2446 content2, date2,
2447 2447 path1, path2, opts=opts)
2448 2448 if header and (text or len(header) > 1):
2449 2449 yield '\n'.join(header) + '\n'
2450 2450 if text:
2451 2451 yield text
2452 2452
2453 2453 def diffstatsum(stats):
2454 2454 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
2455 2455 for f, a, r, b in stats:
2456 2456 maxfile = max(maxfile, encoding.colwidth(f))
2457 2457 maxtotal = max(maxtotal, a + r)
2458 2458 addtotal += a
2459 2459 removetotal += r
2460 2460 binary = binary or b
2461 2461
2462 2462 return maxfile, maxtotal, addtotal, removetotal, binary
2463 2463
2464 2464 def diffstatdata(lines):
2465 2465 diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
2466 2466
2467 2467 results = []
2468 2468 filename, adds, removes, isbinary = None, 0, 0, False
2469 2469
2470 2470 def addresult():
2471 2471 if filename:
2472 2472 results.append((filename, adds, removes, isbinary))
2473 2473
2474 2474 for line in lines:
2475 2475 if line.startswith('diff'):
2476 2476 addresult()
2477 2477 # set numbers to 0 anyway when starting new file
2478 2478 adds, removes, isbinary = 0, 0, False
2479 2479 if line.startswith('diff --git a/'):
2480 2480 filename = gitre.search(line).group(2)
2481 2481 elif line.startswith('diff -r'):
2482 2482 # format: "diff -r ... -r ... filename"
2483 2483 filename = diffre.search(line).group(1)
2484 2484 elif line.startswith('+') and not line.startswith('+++ '):
2485 2485 adds += 1
2486 2486 elif line.startswith('-') and not line.startswith('--- '):
2487 2487 removes += 1
2488 2488 elif (line.startswith('GIT binary patch') or
2489 2489 line.startswith('Binary file')):
2490 2490 isbinary = True
2491 2491 addresult()
2492 2492 return results
2493 2493
2494 2494 def diffstat(lines, width=80, git=False):
2495 2495 output = []
2496 2496 stats = diffstatdata(lines)
2497 2497 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
2498 2498
2499 2499 countwidth = len(str(maxtotal))
2500 2500 if hasbinary and countwidth < 3:
2501 2501 countwidth = 3
2502 2502 graphwidth = width - countwidth - maxname - 6
2503 2503 if graphwidth < 10:
2504 2504 graphwidth = 10
2505 2505
2506 2506 def scale(i):
2507 2507 if maxtotal <= graphwidth:
2508 2508 return i
2509 2509 # If diffstat runs out of room it doesn't print anything,
2510 2510 # which isn't very useful, so always print at least one + or -
2511 2511 # if there were at least some changes.
2512 2512 return max(i * graphwidth // maxtotal, int(bool(i)))
2513 2513
2514 2514 for filename, adds, removes, isbinary in stats:
2515 2515 if isbinary:
2516 2516 count = 'Bin'
2517 2517 else:
2518 2518 count = adds + removes
2519 2519 pluses = '+' * scale(adds)
2520 2520 minuses = '-' * scale(removes)
2521 2521 output.append(' %s%s | %*s %s%s\n' %
2522 2522 (filename, ' ' * (maxname - encoding.colwidth(filename)),
2523 2523 countwidth, count, pluses, minuses))
2524 2524
2525 2525 if stats:
2526 2526 output.append(_(' %d files changed, %d insertions(+), '
2527 2527 '%d deletions(-)\n')
2528 2528 % (len(stats), totaladds, totalremoves))
2529 2529
2530 2530 return ''.join(output)
2531 2531
2532 2532 def diffstatui(*args, **kw):
2533 2533 '''like diffstat(), but yields 2-tuples of (output, label) for
2534 2534 ui.write()
2535 2535 '''
2536 2536
2537 2537 for line in diffstat(*args, **kw).splitlines():
2538 2538 if line and line[-1] in '+-':
2539 2539 name, graph = line.rsplit(' ', 1)
2540 2540 yield (name + ' ', '')
2541 2541 m = re.search(r'\++', graph)
2542 2542 if m:
2543 2543 yield (m.group(0), 'diffstat.inserted')
2544 2544 m = re.search(r'-+', graph)
2545 2545 if m:
2546 2546 yield (m.group(0), 'diffstat.deleted')
2547 2547 else:
2548 2548 yield (line, '')
2549 2549 yield ('\n', '')
@@ -1,633 +1,633
1 1 # posix.py - Posix utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import encoding
10 10 import os, sys, errno, stat, getpass, pwd, grp, socket, tempfile, unicodedata
11 11 import select
12 12 import fcntl, re
13 13
14 14 posixfile = open
15 15 normpath = os.path.normpath
16 16 samestat = os.path.samestat
17 17 oslink = os.link
18 18 unlink = os.unlink
19 19 rename = os.rename
20 20 removedirs = os.removedirs
21 21 expandglobs = False
22 22
23 23 umask = os.umask(0)
24 24 os.umask(umask)
25 25
26 26 def split(p):
27 27 '''Same as posixpath.split, but faster
28 28
29 29 >>> import posixpath
30 30 >>> for f in ['/absolute/path/to/file',
31 31 ... 'relative/path/to/file',
32 32 ... 'file_alone',
33 33 ... 'path/to/directory/',
34 34 ... '/multiple/path//separators',
35 35 ... '/file_at_root',
36 36 ... '///multiple_leading_separators_at_root',
37 37 ... '']:
38 38 ... assert split(f) == posixpath.split(f), f
39 39 '''
40 40 ht = p.rsplit('/', 1)
41 41 if len(ht) == 1:
42 42 return '', p
43 43 nh = ht[0].rstrip('/')
44 44 if nh:
45 45 return nh, ht[1]
46 46 return ht[0] + '/', ht[1]
47 47
48 48 def openhardlinks():
49 49 '''return true if it is safe to hold open file handles to hardlinks'''
50 50 return True
51 51
52 52 def nlinks(name):
53 53 '''return number of hardlinks for the given file'''
54 54 return os.lstat(name).st_nlink
55 55
56 56 def parsepatchoutput(output_line):
57 57 """parses the output produced by patch and returns the filename"""
58 58 pf = output_line[14:]
59 59 if os.sys.platform == 'OpenVMS':
60 60 if pf[0] == '`':
61 61 pf = pf[1:-1] # Remove the quotes
62 62 else:
63 63 if pf.startswith("'") and pf.endswith("'") and " " in pf:
64 64 pf = pf[1:-1] # Remove the quotes
65 65 return pf
66 66
67 67 def sshargs(sshcmd, host, user, port):
68 68 '''Build argument list for ssh'''
69 69 args = user and ("%s@%s" % (user, host)) or host
70 70 return port and ("%s -p %s" % (args, port)) or args
71 71
72 72 def isexec(f):
73 73 """check whether a file is executable"""
74 return (os.lstat(f).st_mode & 0100 != 0)
74 return (os.lstat(f).st_mode & 0o100 != 0)
75 75
76 76 def setflags(f, l, x):
77 77 s = os.lstat(f).st_mode
78 78 if l:
79 79 if not stat.S_ISLNK(s):
80 80 # switch file to link
81 81 fp = open(f)
82 82 data = fp.read()
83 83 fp.close()
84 84 os.unlink(f)
85 85 try:
86 86 os.symlink(data, f)
87 87 except OSError:
88 88 # failed to make a link, rewrite file
89 89 fp = open(f, "w")
90 90 fp.write(data)
91 91 fp.close()
92 92 # no chmod needed at this point
93 93 return
94 94 if stat.S_ISLNK(s):
95 95 # switch link to file
96 96 data = os.readlink(f)
97 97 os.unlink(f)
98 98 fp = open(f, "w")
99 99 fp.write(data)
100 100 fp.close()
101 s = 0666 & ~umask # avoid restatting for chmod
101 s = 0o666 & ~umask # avoid restatting for chmod
102 102
103 sx = s & 0100
103 sx = s & 0o100
104 104 if x and not sx:
105 105 # Turn on +x for every +r bit when making a file executable
106 106 # and obey umask.
107 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
107 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
108 108 elif not x and sx:
109 109 # Turn off all +x bits
110 os.chmod(f, s & 0666)
110 os.chmod(f, s & 0o666)
111 111
112 112 def copymode(src, dst, mode=None):
113 113 '''Copy the file mode from the file at path src to dst.
114 114 If src doesn't exist, we're using mode instead. If mode is None, we're
115 115 using umask.'''
116 116 try:
117 st_mode = os.lstat(src).st_mode & 0777
117 st_mode = os.lstat(src).st_mode & 0o777
118 118 except OSError, inst:
119 119 if inst.errno != errno.ENOENT:
120 120 raise
121 121 st_mode = mode
122 122 if st_mode is None:
123 123 st_mode = ~umask
124 st_mode &= 0666
124 st_mode &= 0o666
125 125 os.chmod(dst, st_mode)
126 126
127 127 def checkexec(path):
128 128 """
129 129 Check whether the given path is on a filesystem with UNIX-like exec flags
130 130
131 131 Requires a directory (like /foo/.hg)
132 132 """
133 133
134 134 # VFAT on some Linux versions can flip mode but it doesn't persist
135 135 # a FS remount. Frequently we can detect it if files are created
136 136 # with exec bit on.
137 137
138 138 try:
139 139 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
140 140 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
141 141 try:
142 142 os.close(fh)
143 m = os.stat(fn).st_mode & 0777
143 m = os.stat(fn).st_mode & 0o777
144 144 new_file_has_exec = m & EXECFLAGS
145 145 os.chmod(fn, m ^ EXECFLAGS)
146 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
146 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0o777) == m)
147 147 finally:
148 148 os.unlink(fn)
149 149 except (IOError, OSError):
150 150 # we don't care, the user probably won't be able to commit anyway
151 151 return False
152 152 return not (new_file_has_exec or exec_flags_cannot_flip)
153 153
154 154 def checklink(path):
155 155 """check whether the given path is on a symlink-capable filesystem"""
156 156 # mktemp is not racy because symlink creation will fail if the
157 157 # file already exists
158 158 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
159 159 try:
160 160 fd = tempfile.NamedTemporaryFile(dir=path, prefix='hg-checklink-')
161 161 try:
162 162 os.symlink(os.path.basename(fd.name), name)
163 163 os.unlink(name)
164 164 return True
165 165 finally:
166 166 fd.close()
167 167 except AttributeError:
168 168 return False
169 169 except OSError, inst:
170 170 # sshfs might report failure while successfully creating the link
171 171 if inst[0] == errno.EIO and os.path.exists(name):
172 172 os.unlink(name)
173 173 return False
174 174
175 175 def checkosfilename(path):
176 176 '''Check that the base-relative path is a valid filename on this platform.
177 177 Returns None if the path is ok, or a UI string describing the problem.'''
178 178 pass # on posix platforms, every path is ok
179 179
180 180 def setbinary(fd):
181 181 pass
182 182
183 183 def pconvert(path):
184 184 return path
185 185
186 186 def localpath(path):
187 187 return path
188 188
189 189 def samefile(fpath1, fpath2):
190 190 """Returns whether path1 and path2 refer to the same file. This is only
191 191 guaranteed to work for files, not directories."""
192 192 return os.path.samefile(fpath1, fpath2)
193 193
194 194 def samedevice(fpath1, fpath2):
195 195 """Returns whether fpath1 and fpath2 are on the same device. This is only
196 196 guaranteed to work for files, not directories."""
197 197 st1 = os.lstat(fpath1)
198 198 st2 = os.lstat(fpath2)
199 199 return st1.st_dev == st2.st_dev
200 200
201 201 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
202 202 def normcase(path):
203 203 return path.lower()
204 204
205 205 # what normcase does to ASCII strings
206 206 normcasespec = encoding.normcasespecs.lower
207 207 # fallback normcase function for non-ASCII strings
208 208 normcasefallback = normcase
209 209
210 210 if sys.platform == 'darwin':
211 211
212 212 def normcase(path):
213 213 '''
214 214 Normalize a filename for OS X-compatible comparison:
215 215 - escape-encode invalid characters
216 216 - decompose to NFD
217 217 - lowercase
218 218 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
219 219
220 220 >>> normcase('UPPER')
221 221 'upper'
222 222 >>> normcase('Caf\xc3\xa9')
223 223 'cafe\\xcc\\x81'
224 224 >>> normcase('\xc3\x89')
225 225 'e\\xcc\\x81'
226 226 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
227 227 '%b8%ca%c3\\xca\\xbe%c8.jpg'
228 228 '''
229 229
230 230 try:
231 231 return encoding.asciilower(path) # exception for non-ASCII
232 232 except UnicodeDecodeError:
233 233 return normcasefallback(path)
234 234
235 235 normcasespec = encoding.normcasespecs.lower
236 236
237 237 def normcasefallback(path):
238 238 try:
239 239 u = path.decode('utf-8')
240 240 except UnicodeDecodeError:
241 241 # OS X percent-encodes any bytes that aren't valid utf-8
242 242 s = ''
243 243 g = ''
244 244 l = 0
245 245 for c in path:
246 246 o = ord(c)
247 247 if l and o < 128 or o >= 192:
248 248 # we want a continuation byte, but didn't get one
249 249 s += ''.join(["%%%02X" % ord(x) for x in g])
250 250 g = ''
251 251 l = 0
252 252 if l == 0 and o < 128:
253 253 # ascii
254 254 s += c
255 255 elif l == 0 and 194 <= o < 245:
256 256 # valid leading bytes
257 257 if o < 224:
258 258 l = 1
259 259 elif o < 240:
260 260 l = 2
261 261 else:
262 262 l = 3
263 263 g = c
264 264 elif l > 0 and 128 <= o < 192:
265 265 # valid continuations
266 266 g += c
267 267 l -= 1
268 268 if not l:
269 269 s += g
270 270 g = ''
271 271 else:
272 272 # invalid
273 273 s += "%%%02X" % o
274 274
275 275 # any remaining partial characters
276 276 s += ''.join(["%%%02X" % ord(x) for x in g])
277 277 u = s.decode('utf-8')
278 278
279 279 # Decompose then lowercase (HFS+ technote specifies lower)
280 280 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
281 281 # drop HFS+ ignored characters
282 282 return encoding.hfsignoreclean(enc)
283 283
284 284 if sys.platform == 'cygwin':
285 285 # workaround for cygwin, in which mount point part of path is
286 286 # treated as case sensitive, even though underlying NTFS is case
287 287 # insensitive.
288 288
289 289 # default mount points
290 290 cygwinmountpoints = sorted([
291 291 "/usr/bin",
292 292 "/usr/lib",
293 293 "/cygdrive",
294 294 ], reverse=True)
295 295
296 296 # use upper-ing as normcase as same as NTFS workaround
297 297 def normcase(path):
298 298 pathlen = len(path)
299 299 if (pathlen == 0) or (path[0] != os.sep):
300 300 # treat as relative
301 301 return encoding.upper(path)
302 302
303 303 # to preserve case of mountpoint part
304 304 for mp in cygwinmountpoints:
305 305 if not path.startswith(mp):
306 306 continue
307 307
308 308 mplen = len(mp)
309 309 if mplen == pathlen: # mount point itself
310 310 return mp
311 311 if path[mplen] == os.sep:
312 312 return mp + encoding.upper(path[mplen:])
313 313
314 314 return encoding.upper(path)
315 315
316 316 normcasespec = encoding.normcasespecs.other
317 317 normcasefallback = normcase
318 318
319 319 # Cygwin translates native ACLs to POSIX permissions,
320 320 # but these translations are not supported by native
321 321 # tools, so the exec bit tends to be set erroneously.
322 322 # Therefore, disable executable bit access on Cygwin.
323 323 def checkexec(path):
324 324 return False
325 325
326 326 # Similarly, Cygwin's symlink emulation is likely to create
327 327 # problems when Mercurial is used from both Cygwin and native
328 328 # Windows, with other native tools, or on shared volumes
329 329 def checklink(path):
330 330 return False
331 331
332 332 _needsshellquote = None
333 333 def shellquote(s):
334 334 if os.sys.platform == 'OpenVMS':
335 335 return '"%s"' % s
336 336 global _needsshellquote
337 337 if _needsshellquote is None:
338 338 _needsshellquote = re.compile(r'[^a-zA-Z0-9._/-]').search
339 339 if s and not _needsshellquote(s):
340 340 # "s" shouldn't have to be quoted
341 341 return s
342 342 else:
343 343 return "'%s'" % s.replace("'", "'\\''")
344 344
345 345 def quotecommand(cmd):
346 346 return cmd
347 347
348 348 def popen(command, mode='r'):
349 349 return os.popen(command, mode)
350 350
351 351 def testpid(pid):
352 352 '''return False if pid dead, True if running or not sure'''
353 353 if os.sys.platform == 'OpenVMS':
354 354 return True
355 355 try:
356 356 os.kill(pid, 0)
357 357 return True
358 358 except OSError, inst:
359 359 return inst.errno != errno.ESRCH
360 360
361 361 def explainexit(code):
362 362 """return a 2-tuple (desc, code) describing a subprocess status
363 363 (codes from kill are negative - not os.system/wait encoding)"""
364 364 if code >= 0:
365 365 return _("exited with status %d") % code, code
366 366 return _("killed by signal %d") % -code, -code
367 367
368 368 def isowner(st):
369 369 """Return True if the stat object st is from the current user."""
370 370 return st.st_uid == os.getuid()
371 371
372 372 def findexe(command):
373 373 '''Find executable for command searching like which does.
374 374 If command is a basename then PATH is searched for command.
375 375 PATH isn't searched if command is an absolute or relative path.
376 376 If command isn't found None is returned.'''
377 377 if sys.platform == 'OpenVMS':
378 378 return command
379 379
380 380 def findexisting(executable):
381 381 'Will return executable if existing file'
382 382 if os.path.isfile(executable) and os.access(executable, os.X_OK):
383 383 return executable
384 384 return None
385 385
386 386 if os.sep in command:
387 387 return findexisting(command)
388 388
389 389 if sys.platform == 'plan9':
390 390 return findexisting(os.path.join('/bin', command))
391 391
392 392 for path in os.environ.get('PATH', '').split(os.pathsep):
393 393 executable = findexisting(os.path.join(path, command))
394 394 if executable is not None:
395 395 return executable
396 396 return None
397 397
398 398 def setsignalhandler():
399 399 pass
400 400
401 401 _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
402 402
403 403 def statfiles(files):
404 404 '''Stat each file in files. Yield each stat, or None if a file does not
405 405 exist or has a type we don't care about.'''
406 406 lstat = os.lstat
407 407 getkind = stat.S_IFMT
408 408 for nf in files:
409 409 try:
410 410 st = lstat(nf)
411 411 if getkind(st.st_mode) not in _wantedkinds:
412 412 st = None
413 413 except OSError, err:
414 414 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
415 415 raise
416 416 st = None
417 417 yield st
418 418
419 419 def getuser():
420 420 '''return name of current user'''
421 421 return getpass.getuser()
422 422
423 423 def username(uid=None):
424 424 """Return the name of the user with the given uid.
425 425
426 426 If uid is None, return the name of the current user."""
427 427
428 428 if uid is None:
429 429 uid = os.getuid()
430 430 try:
431 431 return pwd.getpwuid(uid)[0]
432 432 except KeyError:
433 433 return str(uid)
434 434
435 435 def groupname(gid=None):
436 436 """Return the name of the group with the given gid.
437 437
438 438 If gid is None, return the name of the current group."""
439 439
440 440 if gid is None:
441 441 gid = os.getgid()
442 442 try:
443 443 return grp.getgrgid(gid)[0]
444 444 except KeyError:
445 445 return str(gid)
446 446
447 447 def groupmembers(name):
448 448 """Return the list of members of the group with the given
449 449 name, KeyError if the group does not exist.
450 450 """
451 451 return list(grp.getgrnam(name).gr_mem)
452 452
453 453 def spawndetached(args):
454 454 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
455 455 args[0], args)
456 456
457 457 def gethgcmd():
458 458 return sys.argv[:1]
459 459
460 460 def termwidth():
461 461 try:
462 462 import termios, array
463 463 for dev in (sys.stderr, sys.stdout, sys.stdin):
464 464 try:
465 465 try:
466 466 fd = dev.fileno()
467 467 except AttributeError:
468 468 continue
469 469 if not os.isatty(fd):
470 470 continue
471 471 try:
472 472 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
473 473 width = array.array('h', arri)[1]
474 474 if width > 0:
475 475 return width
476 476 except AttributeError:
477 477 pass
478 478 except ValueError:
479 479 pass
480 480 except IOError, e:
481 481 if e[0] == errno.EINVAL:
482 482 pass
483 483 else:
484 484 raise
485 485 except ImportError:
486 486 pass
487 487 return 80
488 488
489 489 def makedir(path, notindexed):
490 490 os.mkdir(path)
491 491
492 492 def unlinkpath(f, ignoremissing=False):
493 493 """unlink and remove the directory if it is empty"""
494 494 try:
495 495 os.unlink(f)
496 496 except OSError, e:
497 497 if not (ignoremissing and e.errno == errno.ENOENT):
498 498 raise
499 499 # try removing directories that might now be empty
500 500 try:
501 501 os.removedirs(os.path.dirname(f))
502 502 except OSError:
503 503 pass
504 504
505 505 def lookupreg(key, name=None, scope=None):
506 506 return None
507 507
508 508 def hidewindow():
509 509 """Hide current shell window.
510 510
511 511 Used to hide the window opened when starting asynchronous
512 512 child process under Windows, unneeded on other systems.
513 513 """
514 514 pass
515 515
516 516 class cachestat(object):
517 517 def __init__(self, path):
518 518 self.stat = os.stat(path)
519 519
520 520 def cacheable(self):
521 521 return bool(self.stat.st_ino)
522 522
523 523 __hash__ = object.__hash__
524 524
525 525 def __eq__(self, other):
526 526 try:
527 527 # Only dev, ino, size, mtime and atime are likely to change. Out
528 528 # of these, we shouldn't compare atime but should compare the
529 529 # rest. However, one of the other fields changing indicates
530 530 # something fishy going on, so return False if anything but atime
531 531 # changes.
532 532 return (self.stat.st_mode == other.stat.st_mode and
533 533 self.stat.st_ino == other.stat.st_ino and
534 534 self.stat.st_dev == other.stat.st_dev and
535 535 self.stat.st_nlink == other.stat.st_nlink and
536 536 self.stat.st_uid == other.stat.st_uid and
537 537 self.stat.st_gid == other.stat.st_gid and
538 538 self.stat.st_size == other.stat.st_size and
539 539 self.stat.st_mtime == other.stat.st_mtime and
540 540 self.stat.st_ctime == other.stat.st_ctime)
541 541 except AttributeError:
542 542 return False
543 543
544 544 def __ne__(self, other):
545 545 return not self == other
546 546
547 547 def executablepath():
548 548 return None # available on Windows only
549 549
550 550 class unixdomainserver(socket.socket):
551 551 def __init__(self, join, subsystem):
552 552 '''Create a unix domain socket with the given prefix.'''
553 553 super(unixdomainserver, self).__init__(socket.AF_UNIX)
554 554 sockname = subsystem + '.sock'
555 555 self.realpath = self.path = join(sockname)
556 556 if os.path.islink(self.path):
557 557 if os.path.exists(self.path):
558 558 self.realpath = os.readlink(self.path)
559 559 else:
560 560 os.unlink(self.path)
561 561 try:
562 562 self.bind(self.realpath)
563 563 except socket.error, err:
564 564 if err.args[0] == 'AF_UNIX path too long':
565 565 tmpdir = tempfile.mkdtemp(prefix='hg-%s-' % subsystem)
566 566 self.realpath = os.path.join(tmpdir, sockname)
567 567 try:
568 568 self.bind(self.realpath)
569 569 os.symlink(self.realpath, self.path)
570 570 except (OSError, socket.error):
571 571 self.cleanup()
572 572 raise
573 573 else:
574 574 raise
575 575 self.listen(5)
576 576
577 577 def cleanup(self):
578 578 def okayifmissing(f, path):
579 579 try:
580 580 f(path)
581 581 except OSError, err:
582 582 if err.errno != errno.ENOENT:
583 583 raise
584 584
585 585 okayifmissing(os.unlink, self.path)
586 586 if self.realpath != self.path:
587 587 okayifmissing(os.unlink, self.realpath)
588 588 okayifmissing(os.rmdir, os.path.dirname(self.realpath))
589 589
def statislink(st):
    '''check whether a stat result is a symlink'''
    # A falsy stat (e.g. None for a missing file) is passed straight
    # through, matching the short-circuit behavior of ``st and ...``.
    if not st:
        return st
    return stat.S_ISLNK(st.st_mode)
593 593
def statisexec(st):
    '''check whether a stat result is an executable file'''
    # A falsy stat (e.g. None for a missing file) is passed straight
    # through; otherwise test the owner-execute bit (0o100) only.
    if not st:
        return st
    return (st.st_mode & 0o100) != 0
597 597
def poll(fds):
    """block until something happens on any file descriptor

    This is a generic helper that will check for any activity
    (read, write. exception) and return the list of touched files.

    In unsupported cases, it will raise a NotImplementedError"""
    try:
        readable, writable, exceptional = select.select(fds, fds, fds)
    except ValueError: # out of range file descriptor
        raise NotImplementedError()
    # Merge the three result lists, dropping duplicates, and return
    # the touched descriptors in a deterministic order.
    touched = set(readable)
    touched.update(writable)
    touched.update(exceptional)
    return sorted(touched)
610 610
def readpipe(pipe):
    """Read all available data from a pipe."""
    # We can't fstat() a pipe because Linux will always report 0.
    # So, we set the pipe to non-blocking mode and read everything
    # that's available.
    #
    # Bug fix: fcntl(F_SETFL) returns 0 on success, not the previous
    # flags, so the old code restored flags=0 in the finally block,
    # clobbering every file status flag. Remember the F_GETFL value
    # instead and restore that.
    oldflags = fcntl.fcntl(pipe, fcntl.F_GETFL)
    fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)

    try:
        chunks = []
        while True:
            try:
                s = pipe.read()
                if not s:
                    # EOF or no more buffered data
                    break
                chunks.append(s)
            except IOError:
                # nothing available right now in non-blocking mode
                break

        return ''.join(chunks)
    finally:
        # put the descriptor back exactly as we found it
        fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now