##// END OF EJS Templates
consistency: use util.realpath instead of os.path.realpath where useful...
Thomas Arendsen Hein -
r15355:dbdb7775 stable
parent child Browse files
Show More
@@ -1,291 +1,291
1 1 """reorder a revlog (the manifest by default) to save space
2 2
3 3 Specifically, this topologically sorts the revisions in the revlog so that
4 4 revisions on the same branch are adjacent as much as possible. This is a
5 5 workaround for the fact that Mercurial computes deltas relative to the
6 6 previous revision rather than relative to a parent revision.
7 7
8 8 This is *not* safe to run on a changelog.
9 9 """
10 10
11 11 # Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
12 12 # as a patch to rewrite-log. Cleaned up, refactored, documented, and
13 13 # renamed by Greg Ward <greg at gerg.ca>.
14 14
15 15 # XXX would be nice to have a way to verify the repository after shrinking,
16 16 # e.g. by comparing "before" and "after" states of random changesets
17 17 # (maybe: export before, shrink, export after, diff).
18 18
19 19 import os, tempfile, errno
20 20 from mercurial import revlog, transaction, node, util, scmutil
21 21 from mercurial import changegroup
22 22 from mercurial.i18n import _
23 23
24 24
def postorder(start, edges):
    """Return a postorder traversal of the revision graph.

    start: iterable of revisions seeding the traversal (visited last)
    edges: mapping of rev -> sequence of parent revs

    Uses an explicit stack rather than recursion so arbitrarily deep
    histories cannot blow the Python recursion limit.
    """
    result = []
    visit = list(start)
    finished = set()

    while visit:
        cur = visit[-1]
        for p in edges[cur]:
            # defend against node.nullrev because it's occasionally
            # possible for a node to have parents (null, something)
            # rather than (something, null)
            if p not in finished and p != node.nullrev:
                visit.append(p)
                break
        else:
            # all parents done: emit cur and pop it off the stack
            result.append(cur)
            finished.add(cur)
            visit.pop()

    return result
45 45
def toposort_reversepostorder(ui, rl):
    """Topologically sort revlog *rl* as a postorder of the reverse graph.

    Returns a list of revision numbers such that parents precede
    children as much as possible, walking from the heads backwards.
    """
    # map rev to list of parent revs (p2 first)
    parents = {}
    heads = set()
    ui.status(_('reading revs\n'))
    try:
        for rev in rl:
            ui.progress(_('reading'), rev, total=len(rl))
            (p1, p2) = rl.parentrevs(rev)
            if p1 == p2 == node.nullrev:
                parents[rev] = ()       # root node
            elif p1 == p2 or p2 == node.nullrev:
                parents[rev] = (p1,)    # normal node
            else:
                parents[rev] = (p2, p1) # merge node
            # a rev is a head until something names it as a parent
            heads.add(rev)
            for p in parents[rev]:
                heads.discard(p)
    finally:
        ui.progress(_('reading'), None)

    heads = list(heads)
    heads.sort(reverse=True)

    ui.status(_('sorting revs\n'))
    return postorder(heads, parents)
74 74
def toposort_postorderreverse(ui, rl):
    """Topologically sort revlog *rl* as a reversed postorder.

    Builds the child map, walks forward from the roots, then reverses
    the postorder so parents come before children.
    """
    # map rev to list of child revs
    children = {}
    roots = set()
    ui.status(_('reading revs\n'))
    try:
        for rev in rl:
            ui.progress(_('reading'), rev, total=len(rl))
            (p1, p2) = rl.parentrevs(rev)
            if p1 == p2 == node.nullrev:
                roots.add(rev)
            children[rev] = []
            if p1 != node.nullrev:
                children[p1].append(rev)
            if p2 != node.nullrev:
                children[p2].append(rev)
    finally:
        ui.progress(_('reading'), None)

    roots = list(roots)
    roots.sort()

    ui.status(_('sorting revs\n'))
    result = postorder(roots, children)
    result.reverse()
    return result
102 102
def writerevs(ui, r1, r2, order, tr):
    """Copy revlog r1 into r2 with revisions reordered as in *order*.

    The data is funnelled through a changegroup bundle so that deltas
    are recomputed against the new previous revision; *tr* is the
    transaction the writes are recorded in.
    """
    ui.status(_('writing revs\n'))

    order = [r1.node(r) for r in order]

    # this is a bit ugly, but it works: a mutable cell lets the nested
    # lookup() update the progress counter
    count = [0]
    def lookup(revl, x):
        count[0] += 1
        ui.progress(_('writing'), count[0], total=len(order))
        # smuggle the linkrev through the changegroup as a fixed-width
        # decimal string; unlookup() below decodes it
        return "%020d" % revl.linkrev(revl.rev(x))

    unlookup = lambda x: int(x, 10)

    try:
        bundler = changegroup.bundle10(lookup)
        group = util.chunkbuffer(r1.group(order, bundler))
        group = changegroup.unbundle10(group, "UN")
        r2.addgroup(group, unlookup, tr)
    finally:
        ui.progress(_('writing'), None)
126 126
127 127 def report(ui, r1, r2):
128 128 def getsize(r):
129 129 s = 0
130 130 for fn in (r.indexfile, r.datafile):
131 131 try:
132 132 s += os.stat(fn).st_size
133 133 except OSError, inst:
134 134 if inst.errno != errno.ENOENT:
135 135 raise
136 136 return s
137 137
138 138 oldsize = float(getsize(r1))
139 139 newsize = float(getsize(r2))
140 140
141 141 # argh: have to pass an int to %d, because a float >= 2^32
142 142 # blows up under Python 2.5 or earlier
143 143 ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
144 144 % (int(oldsize), oldsize / 1024 / 1024))
145 145 ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
146 146 % (int(newsize), newsize / 1024 / 1024))
147 147
148 148 shrink_percent = (oldsize - newsize) / oldsize * 100
149 149 shrink_factor = oldsize / newsize
150 150 ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
151 151 % (shrink_percent, shrink_factor))
152 152
153 153 def shrink(ui, repo, **opts):
154 154 """shrink a revlog by reordering revisions
155 155
156 156 Rewrites all the entries in some revlog of the current repository
157 157 (by default, the manifest log) to save space.
158 158
159 159 Different sort algorithms have different performance
160 160 characteristics. Use ``--sort`` to select a sort algorithm so you
161 161 can determine which works best for your data.
162 162 """
163 163
164 164 if not repo.local():
165 165 raise util.Abort(_('not a local repository: %s') % repo.root)
166 166
167 167 fn = opts.get('revlog')
168 168 if not fn:
169 169 indexfn = repo.sjoin('00manifest.i')
170 170 else:
171 171 if not fn.endswith('.i'):
172 172 raise util.Abort(_('--revlog option must specify the revlog index '
173 173 'file (*.i), not %s') % opts.get('revlog'))
174 174
175 indexfn = os.path.realpath(fn)
175 indexfn = util.realpath(fn)
176 176 store = repo.sjoin('')
177 177 if not indexfn.startswith(store):
178 178 raise util.Abort(_('--revlog option must specify a revlog in %s, '
179 179 'not %s') % (store, indexfn))
180 180
181 181 sortname = opts['sort']
182 182 try:
183 183 toposort = globals()['toposort_' + sortname]
184 184 except KeyError:
185 185 raise util.Abort(_('no such toposort algorithm: %s') % sortname)
186 186
187 187 if not os.path.exists(indexfn):
188 188 raise util.Abort(_('no such file: %s') % indexfn)
189 189 if '00changelog' in indexfn:
190 190 raise util.Abort(_('shrinking the changelog '
191 191 'will corrupt your repository'))
192 192
193 193 ui.write(_('shrinking %s\n') % indexfn)
194 194 prefix = os.path.basename(indexfn)[:-1]
195 195 tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
196 196
197 197 r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
198 198 r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
199 199
200 200 datafn, tmpdatafn = r1.datafile, r2.datafile
201 201
202 202 oldindexfn = indexfn + '.old'
203 203 olddatafn = datafn + '.old'
204 204 if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
205 205 raise util.Abort(_('one or both of\n'
206 206 ' %s\n'
207 207 ' %s\n'
208 208 'exists from a previous run; please clean up '
209 209 'before running again') % (oldindexfn, olddatafn))
210 210
211 211 # Don't use repo.transaction(), because then things get hairy with
212 212 # paths: some need to be relative to .hg, and some need to be
213 213 # absolute. Doing it this way keeps things simple: everything is an
214 214 # absolute path.
215 215 lock = repo.lock(wait=False)
216 216 tr = transaction.transaction(ui.warn,
217 217 open,
218 218 repo.sjoin('journal'))
219 219
220 220 def ignoremissing(func):
221 221 def f(*args, **kw):
222 222 try:
223 223 return func(*args, **kw)
224 224 except OSError, inst:
225 225 if inst.errno != errno.ENOENT:
226 226 raise
227 227 return f
228 228
229 229 try:
230 230 try:
231 231 order = toposort(ui, r1)
232 232
233 233 suboptimal = 0
234 234 for i in xrange(1, len(order)):
235 235 parents = [p for p in r1.parentrevs(order[i])
236 236 if p != node.nullrev]
237 237 if parents and order[i - 1] not in parents:
238 238 suboptimal += 1
239 239 ui.note(_('%d suboptimal nodes\n') % suboptimal)
240 240
241 241 writerevs(ui, r1, r2, order, tr)
242 242 report(ui, r1, r2)
243 243 tr.close()
244 244 except:
245 245 # Abort transaction first, so we truncate the files before
246 246 # deleting them.
247 247 tr.abort()
248 248 for fn in (tmpindexfn, tmpdatafn):
249 249 ignoremissing(os.unlink)(fn)
250 250 raise
251 251 if not opts.get('dry_run'):
252 252 # racy, both files cannot be renamed atomically
253 253 # copy files
254 254 util.oslink(indexfn, oldindexfn)
255 255 ignoremissing(util.oslink)(datafn, olddatafn)
256 256
257 257 # rename
258 258 util.rename(tmpindexfn, indexfn)
259 259 try:
260 260 os.chmod(tmpdatafn, os.stat(datafn).st_mode)
261 261 util.rename(tmpdatafn, datafn)
262 262 except OSError, inst:
263 263 if inst.errno != errno.ENOENT:
264 264 raise
265 265 ignoremissing(os.unlink)(datafn)
266 266 else:
267 267 for fn in (tmpindexfn, tmpdatafn):
268 268 ignoremissing(os.unlink)(fn)
269 269 finally:
270 270 lock.release()
271 271
272 272 if not opts.get('dry_run'):
273 273 ui.write(_('note: old revlog saved in:\n'
274 274 ' %s\n'
275 275 ' %s\n'
276 276 '(You can delete those files when you are satisfied that your\n'
277 277 'repository is still sane. '
278 278 'Running \'hg verify\' is strongly recommended.)\n')
279 279 % (oldindexfn, olddatafn))
280 280
281 281 cmdtable = {
282 282 'shrink': (shrink,
283 283 [('', 'revlog', '', _('index (.i) file of the revlog to shrink')),
284 284 ('n', 'dry-run', None, _('do not shrink, simulate only')),
285 285 ('', 'sort', 'reversepostorder', 'name of sort algorithm to use'),
286 286 ],
287 287 _('hg shrink [--revlog PATH]'))
288 288 }
289 289
290 290 if __name__ == "__main__":
291 291 print "shrink-revlog.py is now an extension (see hg help extensions)"
@@ -1,200 +1,200
1 1 # darcs.py - darcs support for the convert extension
2 2 #
3 3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from common import NoRepo, checktool, commandline, commit, converter_source
9 9 from mercurial.i18n import _
10 10 from mercurial import util
11 11 import os, shutil, tempfile, re
12 12
13 13 # The naming drift of ElementTree is fun!
14 14
15 15 try:
16 16 from xml.etree.cElementTree import ElementTree, XMLParser
17 17 except ImportError:
18 18 try:
19 19 from xml.etree.ElementTree import ElementTree, XMLParser
20 20 except ImportError:
21 21 try:
22 22 from elementtree.cElementTree import ElementTree, XMLParser
23 23 except ImportError:
24 24 try:
25 25 from elementtree.ElementTree import ElementTree, XMLParser
26 26 except ImportError:
27 27 ElementTree = None
28 28
29 29 class darcs_source(converter_source, commandline):
30 30 def __init__(self, ui, path, rev=None):
31 31 converter_source.__init__(self, ui, path, rev=rev)
32 32 commandline.__init__(self, ui, 'darcs')
33 33
34 34 # check for _darcs, ElementTree so that we can easily skip
35 35 # test-convert-darcs if ElementTree is not around
36 36 if not os.path.exists(os.path.join(path, '_darcs')):
37 37 raise NoRepo(_("%s does not look like a darcs repository") % path)
38 38
39 39 checktool('darcs')
40 40 version = self.run0('--version').splitlines()[0].strip()
41 41 if version < '2.1':
42 42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
43 43 version)
44 44
45 45 if ElementTree is None:
46 46 raise util.Abort(_("Python ElementTree module is not available"))
47 47
48 self.path = os.path.realpath(path)
48 self.path = util.realpath(path)
49 49
50 50 self.lastrev = None
51 51 self.changes = {}
52 52 self.parents = {}
53 53 self.tags = {}
54 54
55 55 # Check darcs repository format
56 56 format = self.format()
57 57 if format:
58 58 if format in ('darcs-1.0', 'hashed'):
59 59 raise NoRepo(_("%s repository format is unsupported, "
60 60 "please upgrade") % format)
61 61 else:
62 62 self.ui.warn(_('failed to detect repository format!'))
63 63
64 64 def before(self):
65 65 self.tmppath = tempfile.mkdtemp(
66 66 prefix='convert-' + os.path.basename(self.path) + '-')
67 67 output, status = self.run('init', repodir=self.tmppath)
68 68 self.checkexit(status)
69 69
70 70 tree = self.xml('changes', xml_output=True, summary=True,
71 71 repodir=self.path)
72 72 tagname = None
73 73 child = None
74 74 for elt in tree.findall('patch'):
75 75 node = elt.get('hash')
76 76 name = elt.findtext('name', '')
77 77 if name.startswith('TAG '):
78 78 tagname = name[4:].strip()
79 79 elif tagname is not None:
80 80 self.tags[tagname] = node
81 81 tagname = None
82 82 self.changes[node] = elt
83 83 self.parents[child] = [node]
84 84 child = node
85 85 self.parents[child] = []
86 86
87 87 def after(self):
88 88 self.ui.debug('cleaning up %s\n' % self.tmppath)
89 89 shutil.rmtree(self.tmppath, ignore_errors=True)
90 90
91 91 def recode(self, s, encoding=None):
92 92 if isinstance(s, unicode):
93 93 # XMLParser returns unicode objects for anything it can't
94 94 # encode into ASCII. We convert them back to str to get
95 95 # recode's normal conversion behavior.
96 96 s = s.encode('latin-1')
97 97 return super(darcs_source, self).recode(s, encoding)
98 98
99 99 def xml(self, cmd, **kwargs):
100 100 # NOTE: darcs is currently encoding agnostic and will print
101 101 # patch metadata byte-for-byte, even in the XML changelog.
102 102 etree = ElementTree()
103 103 # While we are decoding the XML as latin-1 to be as liberal as
104 104 # possible, etree will still raise an exception if any
105 105 # non-printable characters are in the XML changelog.
106 106 parser = XMLParser(encoding='latin-1')
107 107 fp = self._run(cmd, **kwargs)
108 108 etree.parse(fp, parser=parser)
109 109 self.checkexit(fp.close())
110 110 return etree.getroot()
111 111
112 112 def format(self):
113 113 output, status = self.run('show', 'repo', no_files=True,
114 114 repodir=self.path)
115 115 self.checkexit(status)
116 116 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
117 117 if not m:
118 118 return None
119 119 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
120 120
121 121 def manifest(self):
122 122 man = []
123 123 output, status = self.run('show', 'files', no_directories=True,
124 124 repodir=self.tmppath)
125 125 self.checkexit(status)
126 126 for line in output.split('\n'):
127 127 path = line[2:]
128 128 if path:
129 129 man.append(path)
130 130 return man
131 131
132 132 def getheads(self):
133 133 return self.parents[None]
134 134
135 135 def getcommit(self, rev):
136 136 elt = self.changes[rev]
137 137 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
138 138 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
139 139 # etree can return unicode objects for name, comment, and author,
140 140 # so recode() is used to ensure str objects are emitted.
141 141 return commit(author=self.recode(elt.get('author')),
142 142 date=util.datestr(date),
143 143 desc=self.recode(desc).strip(),
144 144 parents=self.parents[rev])
145 145
146 146 def pull(self, rev):
147 147 output, status = self.run('pull', self.path, all=True,
148 148 match='hash %s' % rev,
149 149 no_test=True, no_posthook=True,
150 150 external_merge='/bin/false',
151 151 repodir=self.tmppath)
152 152 if status:
153 153 if output.find('We have conflicts in') == -1:
154 154 self.checkexit(status, output)
155 155 output, status = self.run('revert', all=True, repodir=self.tmppath)
156 156 self.checkexit(status, output)
157 157
158 158 def getchanges(self, rev):
159 159 copies = {}
160 160 changes = []
161 161 man = None
162 162 for elt in self.changes[rev].find('summary').getchildren():
163 163 if elt.tag in ('add_directory', 'remove_directory'):
164 164 continue
165 165 if elt.tag == 'move':
166 166 if man is None:
167 167 man = self.manifest()
168 168 source, dest = elt.get('from'), elt.get('to')
169 169 if source in man:
170 170 # File move
171 171 changes.append((source, rev))
172 172 changes.append((dest, rev))
173 173 copies[dest] = source
174 174 else:
175 175 # Directory move, deduce file moves from manifest
176 176 source = source + '/'
177 177 for f in man:
178 178 if not f.startswith(source):
179 179 continue
180 180 fdest = dest + '/' + f[len(source):]
181 181 changes.append((f, rev))
182 182 changes.append((fdest, rev))
183 183 copies[fdest] = f
184 184 else:
185 185 changes.append((elt.text.strip(), rev))
186 186 self.pull(rev)
187 187 self.lastrev = rev
188 188 return sorted(changes), copies
189 189
190 190 def getfile(self, name, rev):
191 191 if rev != self.lastrev:
192 192 raise util.Abort(_('internal calling inconsistency'))
193 193 path = os.path.join(self.tmppath, name)
194 194 data = util.readfile(path)
195 195 mode = os.lstat(path).st_mode
196 196 mode = (mode & 0111) and 'x' or ''
197 197 return data, mode
198 198
199 199 def gettags(self):
200 200 return self.tags
@@ -1,338 +1,338
1 1 # gnuarch.py - GNU Arch support for the convert extension
2 2 #
3 3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 4 # and others
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from common import NoRepo, commandline, commit, converter_source
10 10 from mercurial.i18n import _
11 11 from mercurial import encoding, util
12 12 import os, shutil, tempfile, stat
13 13 from email.Parser import Parser
14 14
15 15 class gnuarch_source(converter_source, commandline):
16 16
17 17 class gnuarch_rev(object):
18 18 def __init__(self, rev):
19 19 self.rev = rev
20 20 self.summary = ''
21 21 self.date = None
22 22 self.author = ''
23 23 self.continuationof = None
24 24 self.add_files = []
25 25 self.mod_files = []
26 26 self.del_files = []
27 27 self.ren_files = {}
28 28 self.ren_dirs = {}
29 29
30 30 def __init__(self, ui, path, rev=None):
31 31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
32 32
33 33 if not os.path.exists(os.path.join(path, '{arch}')):
34 34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 35 % path)
36 36
37 37 # Could use checktool, but we want to check for baz or tla.
38 38 self.execmd = None
39 39 if util.findexe('baz'):
40 40 self.execmd = 'baz'
41 41 else:
42 42 if util.findexe('tla'):
43 43 self.execmd = 'tla'
44 44 else:
45 45 raise util.Abort(_('cannot find a GNU Arch tool'))
46 46
47 47 commandline.__init__(self, ui, self.execmd)
48 48
49 self.path = os.path.realpath(path)
49 self.path = util.realpath(path)
50 50 self.tmppath = None
51 51
52 52 self.treeversion = None
53 53 self.lastrev = None
54 54 self.changes = {}
55 55 self.parents = {}
56 56 self.tags = {}
57 57 self.catlogparser = Parser()
58 58 self.encoding = encoding.encoding
59 59 self.archives = []
60 60
61 61 def before(self):
62 62 # Get registered archives
63 63 self.archives = [i.rstrip('\n')
64 64 for i in self.runlines0('archives', '-n')]
65 65
66 66 if self.execmd == 'tla':
67 67 output = self.run0('tree-version', self.path)
68 68 else:
69 69 output = self.run0('tree-version', '-d', self.path)
70 70 self.treeversion = output.strip()
71 71
72 72 # Get name of temporary directory
73 73 version = self.treeversion.split('/')
74 74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 75 'hg-%s' % version[1])
76 76
77 77 # Generate parents dictionary
78 78 self.parents[None] = []
79 79 treeversion = self.treeversion
80 80 child = None
81 81 while treeversion:
82 82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83 83
84 84 archive = treeversion.split('/')[0]
85 85 if archive not in self.archives:
86 86 self.ui.status(_('tree analysis stopped because it points to '
87 87 'an unregistered archive %s...\n') % archive)
88 88 break
89 89
90 90 # Get the complete list of revisions for that tree version
91 91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 92 self.checkexit(status, 'failed retrieveing revisions for %s'
93 93 % treeversion)
94 94
95 95 # No new iteration unless a revision has a continuation-of header
96 96 treeversion = None
97 97
98 98 for l in output:
99 99 rev = l.strip()
100 100 self.changes[rev] = self.gnuarch_rev(rev)
101 101 self.parents[rev] = []
102 102
103 103 # Read author, date and summary
104 104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 105 if status:
106 106 catlog = self.run0('cat-archive-log', rev)
107 107 self._parsecatlog(catlog, rev)
108 108
109 109 # Populate the parents map
110 110 self.parents[child].append(rev)
111 111
112 112 # Keep track of the current revision as the child of the next
113 113 # revision scanned
114 114 child = rev
115 115
116 116 # Check if we have to follow the usual incremental history
117 117 # or if we have to 'jump' to a different treeversion given
118 118 # by the continuation-of header.
119 119 if self.changes[rev].continuationof:
120 120 treeversion = '--'.join(
121 121 self.changes[rev].continuationof.split('--')[:-1])
122 122 break
123 123
124 124 # If we reached a base-0 revision w/o any continuation-of
125 125 # header, it means the tree history ends here.
126 126 if rev[-6:] == 'base-0':
127 127 break
128 128
129 129 def after(self):
130 130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 131 shutil.rmtree(self.tmppath, ignore_errors=True)
132 132
133 133 def getheads(self):
134 134 return self.parents[None]
135 135
136 136 def getfile(self, name, rev):
137 137 if rev != self.lastrev:
138 138 raise util.Abort(_('internal calling inconsistency'))
139 139
140 140 # Raise IOError if necessary (i.e. deleted files).
141 141 if not os.path.lexists(os.path.join(self.tmppath, name)):
142 142 raise IOError
143 143
144 144 return self._getfile(name, rev)
145 145
146 146 def getchanges(self, rev):
147 147 self._update(rev)
148 148 changes = []
149 149 copies = {}
150 150
151 151 for f in self.changes[rev].add_files:
152 152 changes.append((f, rev))
153 153
154 154 for f in self.changes[rev].mod_files:
155 155 changes.append((f, rev))
156 156
157 157 for f in self.changes[rev].del_files:
158 158 changes.append((f, rev))
159 159
160 160 for src in self.changes[rev].ren_files:
161 161 to = self.changes[rev].ren_files[src]
162 162 changes.append((src, rev))
163 163 changes.append((to, rev))
164 164 copies[to] = src
165 165
166 166 for src in self.changes[rev].ren_dirs:
167 167 to = self.changes[rev].ren_dirs[src]
168 168 chgs, cps = self._rendirchanges(src, to)
169 169 changes += [(f, rev) for f in chgs]
170 170 copies.update(cps)
171 171
172 172 self.lastrev = rev
173 173 return sorted(set(changes)), copies
174 174
175 175 def getcommit(self, rev):
176 176 changes = self.changes[rev]
177 177 return commit(author=changes.author, date=changes.date,
178 178 desc=changes.summary, parents=self.parents[rev], rev=rev)
179 179
180 180 def gettags(self):
181 181 return self.tags
182 182
183 183 def _execute(self, cmd, *args, **kwargs):
184 184 cmdline = [self.execmd, cmd]
185 185 cmdline += args
186 186 cmdline = [util.shellquote(arg) for arg in cmdline]
187 187 cmdline += ['>', util.nulldev, '2>', util.nulldev]
188 188 cmdline = util.quotecommand(' '.join(cmdline))
189 189 self.ui.debug(cmdline, '\n')
190 190 return os.system(cmdline)
191 191
192 192 def _update(self, rev):
193 193 self.ui.debug('applying revision %s...\n' % rev)
194 194 changeset, status = self.runlines('replay', '-d', self.tmppath,
195 195 rev)
196 196 if status:
197 197 # Something went wrong while merging (baz or tla
198 198 # issue?), get latest revision and try from there
199 199 shutil.rmtree(self.tmppath, ignore_errors=True)
200 200 self._obtainrevision(rev)
201 201 else:
202 202 old_rev = self.parents[rev][0]
203 203 self.ui.debug('computing changeset between %s and %s...\n'
204 204 % (old_rev, rev))
205 205 self._parsechangeset(changeset, rev)
206 206
207 207 def _getfile(self, name, rev):
208 208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
209 209 if stat.S_ISLNK(mode):
210 210 data = os.readlink(os.path.join(self.tmppath, name))
211 211 mode = mode and 'l' or ''
212 212 else:
213 213 data = open(os.path.join(self.tmppath, name), 'rb').read()
214 214 mode = (mode & 0111) and 'x' or ''
215 215 return data, mode
216 216
217 217 def _exclude(self, name):
218 218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
219 219 for exc in exclude:
220 220 if name.find(exc) != -1:
221 221 return True
222 222 return False
223 223
224 224 def _readcontents(self, path):
225 225 files = []
226 226 contents = os.listdir(path)
227 227 while len(contents) > 0:
228 228 c = contents.pop()
229 229 p = os.path.join(path, c)
230 230 # os.walk could be used, but here we avoid internal GNU
231 231 # Arch files and directories, thus saving a lot time.
232 232 if not self._exclude(p):
233 233 if os.path.isdir(p):
234 234 contents += [os.path.join(c, f) for f in os.listdir(p)]
235 235 else:
236 236 files.append(c)
237 237 return files
238 238
239 239 def _rendirchanges(self, src, dest):
240 240 changes = []
241 241 copies = {}
242 242 files = self._readcontents(os.path.join(self.tmppath, dest))
243 243 for f in files:
244 244 s = os.path.join(src, f)
245 245 d = os.path.join(dest, f)
246 246 changes.append(s)
247 247 changes.append(d)
248 248 copies[d] = s
249 249 return changes, copies
250 250
251 251 def _obtainrevision(self, rev):
252 252 self.ui.debug('obtaining revision %s...\n' % rev)
253 253 output = self._execute('get', rev, self.tmppath)
254 254 self.checkexit(output)
255 255 self.ui.debug('analyzing revision %s...\n' % rev)
256 256 files = self._readcontents(self.tmppath)
257 257 self.changes[rev].add_files += files
258 258
259 259 def _stripbasepath(self, path):
260 260 if path.startswith('./'):
261 261 return path[2:]
262 262 return path
263 263
264 264 def _parsecatlog(self, data, rev):
265 265 try:
266 266 catlog = self.catlogparser.parsestr(data)
267 267
268 268 # Commit date
269 269 self.changes[rev].date = util.datestr(
270 270 util.strdate(catlog['Standard-date'],
271 271 '%Y-%m-%d %H:%M:%S'))
272 272
273 273 # Commit author
274 274 self.changes[rev].author = self.recode(catlog['Creator'])
275 275
276 276 # Commit description
277 277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
278 278 catlog.get_payload()))
279 279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
280 280
281 281 # Commit revision origin when dealing with a branch or tag
282 282 if 'Continuation-of' in catlog:
283 283 self.changes[rev].continuationof = self.recode(
284 284 catlog['Continuation-of'])
285 285 except Exception:
286 286 raise util.Abort(_('could not parse cat-log of %s') % rev)
287 287
288 288 def _parsechangeset(self, data, rev):
289 289 for l in data:
290 290 l = l.strip()
291 291 # Added file (ignore added directory)
292 292 if l.startswith('A') and not l.startswith('A/'):
293 293 file = self._stripbasepath(l[1:].strip())
294 294 if not self._exclude(file):
295 295 self.changes[rev].add_files.append(file)
296 296 # Deleted file (ignore deleted directory)
297 297 elif l.startswith('D') and not l.startswith('D/'):
298 298 file = self._stripbasepath(l[1:].strip())
299 299 if not self._exclude(file):
300 300 self.changes[rev].del_files.append(file)
301 301 # Modified binary file
302 302 elif l.startswith('Mb'):
303 303 file = self._stripbasepath(l[2:].strip())
304 304 if not self._exclude(file):
305 305 self.changes[rev].mod_files.append(file)
306 306 # Modified link
307 307 elif l.startswith('M->'):
308 308 file = self._stripbasepath(l[3:].strip())
309 309 if not self._exclude(file):
310 310 self.changes[rev].mod_files.append(file)
311 311 # Modified file
312 312 elif l.startswith('M'):
313 313 file = self._stripbasepath(l[1:].strip())
314 314 if not self._exclude(file):
315 315 self.changes[rev].mod_files.append(file)
316 316 # Renamed file (or link)
317 317 elif l.startswith('=>'):
318 318 files = l[2:].strip().split(' ')
319 319 if len(files) == 1:
320 320 files = l[2:].strip().split('\t')
321 321 src = self._stripbasepath(files[0])
322 322 dst = self._stripbasepath(files[1])
323 323 if not self._exclude(src) and not self._exclude(dst):
324 324 self.changes[rev].ren_files[src] = dst
325 325 # Conversion from file to link or from link to file (modified)
326 326 elif l.startswith('ch'):
327 327 file = self._stripbasepath(l[2:].strip())
328 328 if not self._exclude(file):
329 329 self.changes[rev].mod_files.append(file)
330 330 # Renamed directory
331 331 elif l.startswith('/>'):
332 332 dirs = l[2:].strip().split(' ')
333 333 if len(dirs) == 1:
334 334 dirs = l[2:].strip().split('\t')
335 335 src = self._stripbasepath(dirs[0])
336 336 dst = self._stripbasepath(dirs[1])
337 337 if not self._exclude(src) and not self._exclude(dst):
338 338 self.changes[rev].ren_dirs[src] = dst
@@ -1,1175 +1,1175
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4
5 5 import os
6 6 import re
7 7 import sys
8 8 import cPickle as pickle
9 9 import tempfile
10 10 import urllib
11 11 import urllib2
12 12
13 13 from mercurial import strutil, scmutil, util, encoding
14 14 from mercurial.i18n import _
15 15
16 16 # Subversion stuff. Works best with very recent Python SVN bindings
17 17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 18 # these bindings.
19 19
20 20 from cStringIO import StringIO
21 21
22 22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 23 from common import commandline, converter_source, converter_sink, mapfile
24 24
25 25 try:
26 26 from svn.core import SubversionException, Pool
27 27 import svn
28 28 import svn.client
29 29 import svn.core
30 30 import svn.ra
31 31 import svn.delta
32 32 import transport
33 33 import warnings
34 34 warnings.filterwarnings('ignore',
35 35 module='svn.core',
36 36 category=DeprecationWarning)
37 37
38 38 except ImportError:
39 39 svn = None
40 40
class SvnPathNotFound(Exception):
    """Raised when a path cannot be found in the repository history probed."""
    pass
43 43
def revsplit(rev):
    """Parse a revision string and return (uuid, path, revnum)."""
    # A revision id looks like "svn:<uuid><module>@<revnum>"; peel off
    # the numeric revision first, then separate the uuid from the module.
    url, num = rev.rsplit('@', 1)
    uuid, sep, tail = url.partition('/')
    if sep:
        mod = '/' + tail
    else:
        mod = ''
    # strip the leading "svn:" marker from the uuid part
    return uuid[4:], mod, int(num)
52 52
def geturl(path):
    """Return a Subversion URL for path.

    Ask the svn bindings first (this handles working copies and already
    canonical URLs); if that fails, turn an existing local directory into
    a file:// URL, otherwise return the path unchanged.
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # not something the bindings can resolve; fall through
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path
67 67
def optrev(number):
    """Wrap a plain revision number into an svn_opt_revision_t object."""
    optrev = svn.core.svn_opt_revision_t()
    optrev.kind = svn.core.svn_opt_revision_number
    optrev.value.number = number
    return optrev
73 73
class changedpath(object):
    """Detached snapshot of an svn changed-path entry.

    Copies the three fields we consume so the original (pool-allocated)
    svn object can be released and the snapshot safely pickled.
    """
    def __init__(self, p):
        for name in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, name, getattr(p, name))
79 79
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Run svn.ra.get_log and pickle each log entry to the stream fp.

    Intended to run in a child process (see debugsvnlog): the svn
    bindings hold on to large amounts of memory over long logs, so the
    child terminates with os._exit instead of a clean shutdown.
    A final pickled None marks normal end of stream; a pickled error
    number signals a SubversionException to the consumer (logstream).
    """
    protocol = -1  # newest pickle protocol
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                # detach entries from the svn memory pool so they pickle
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
110 110
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # stdin carries the pickled get_log_child() arguments; the pickled
    # log entries are streamed back on stdout, hence binary mode on both.
    util.setbinary(sys.stdin)
    util.setbinary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
119 119
class logstream(object):
    """Interruptible revision log iterator.

    Reads the pickled entries produced by get_log_child() from a child
    process pipe and yields them one at a time.
    """
    def __init__(self, stdout):
        # pipe connected to the child's stdout
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                # not a 5-tuple: either the None end-of-stream marker or
                # a pickled svn error number forwarded by the child
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
144 144
145 145
# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
# directory.
def filecheck(ui, path, proto):
    """Return True if path contains the layout of a local svn repository.

    ui and proto are unused here; they keep the signature shared with the
    other protocol checkers in protomap.
    """
    markers = ('locks', 'hooks', 'format', 'db')
    return all(os.path.exists(os.path.join(path, name)) for name in markers)
154 154
# Check to see if a given path is the root of an svn repo over http. We verify
# this by requesting a version-controlled URL we know can't exist and looking
# for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    try:
        opener = urllib2.build_opener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urllib2.HTTPError, inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        # a 404 still carries the error body we want to inspect
        data = inst.fp.read()
    except:
        # Could be urllib2.URLError if the URL is invalid or anything else.
        return False
    # svn's mod_dav_svn emits this errcode for a missing versioned resource
    return '<m:human-readable errcode="160013">' in data
175 175
# Map a URL scheme to the probe used to detect an svn repository there.
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
def issvnurl(ui, url):
    """Return True if url points at or inside a Subversion repository.

    The scheme selects the probe from protomap; the path is then walked
    upward one component at a time until a repository root is detected.
    """
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        # no scheme present: treat url as a local path
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = path.replace(os.sep, '/')
    check = protomap.get(proto, lambda *args: False)
    while '/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
196 196
197 197 # SVN conversion code stolen from bzr-svn and tailor
198 198 #
199 199 # Subversion looks like a versioned filesystem, branches structures
200 200 # are defined by conventions and not enforced by the tool. First,
201 201 # we define the potential branches (modules) as "trunk" and "branches"
202 202 # children directories. Revisions are then identified by their
203 203 # module and revision number (and a repository identifier).
204 204 #
205 205 # The revision graph is really a tree (or a forest). By default, a
206 206 # revision parent is the previous revision in the same module. If the
207 207 # module directory is copied/moved from another module then the
208 208 # revision is the module root and its parent the source revision in
209 209 # the parent module. A revision has at most one parent.
210 210 #
class svn_source(converter_source):
    """Read revision history out of a Subversion repository."""
    def __init__(self, ui, url, rev=None):
        super(svn_source, self).__init__(ui, url, rev=rev)

        # Cheap up-front checks before touching the bindings: svn URLs,
        # a local working copy, or anything issvnurl() can probe.
        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('Could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # very old bindings do not even expose the version constants
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        # per-module highest already-converted revnum (see setrevmap)
        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        # an explicit --rev overrides any @rev found in the URL above
        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        # remember the working copy root so converted() can record the
        # revision map next to it
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
297 297
    def setrevmap(self, revmap):
        """Prime self.lastrevs with the highest converted revnum per module."""
        lastrevs = {}
        for revid in revmap.iterkeys():
            uuid, module, revnum = revsplit(revid)
            # keep only the maximum revnum seen for each module
            lastrevnum = lastrevs.setdefault(module, revnum)
            if revnum > lastrevnum:
                lastrevs[module] = revnum
        self.lastrevs = lastrevs
306 306
    def exists(self, path, optrev):
        """Return True if path exists in the repository at revision optrev."""
        try:
            svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
                          optrev, False, self.ctx)
            return True
        except SubversionException:
            return False
314 314
    def getheads(self):
        """Return the list of head revision ids (trunk first, then branches)."""

        def isdir(path, revnum):
            # True when path is a directory at revnum
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # resolve the configured (or conventional) trunk/tags/branches
            # path; an explicitly empty config value disables the lookup
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == 'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                     % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        # --startrev only makes sense when converting a single branch
        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
386 386
    def getchanges(self, rev):
        """Return (files, copies) for rev: files is a sorted [(path, rev)]
        list, copies maps destination path to source path."""
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        (paths, parents) = self.paths[rev]
        if parents:
            files, self.removed, copies = self.expandpaths(rev, paths, parents)
        else:
            # Perform a full checkout on roots
            uuid, module, revnum = revsplit(rev)
            entries = svn.client.ls(self.baseurl + urllib.quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n, e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            copies = {}
            self.removed = set()

        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)
410 410
    def getchangedfiles(self, rev, i):
        """Return the paths changed in rev (i is unused for svn sources)."""
        changes = self.getchanges(rev)
        # cache so the upcoming getchanges(rev) call is free
        self._changescache = (rev, changes)
        return [f[0] for f in changes[0]]
415 415
    def getcommit(self, rev):
        """Return the commit object for rev, fetching log entries lazily."""
        if rev not in self.commits:
            uuid, module, revnum = revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            # revision graph backward traversal. Cache all of them
            # down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            # isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
        commit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return commit
435 435
    def gettags(self):
        """Return a {tagname: revid} map built from the tags directory."""
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                # (source, source rev, destination) for every copy/move
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                renamings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        # moved within the tags directory: a rename
                        renamings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = renamings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags
529 529
    def converted(self, rev, destrev):
        """Append the svn revnum -> hg hash mapping to .svn/hg-shamap.

        Only applies when converting from a working copy (self.wc set).
        """
        if not self.wc:
            return
        if self.convertfp is None:
            # opened lazily in append mode, flushed per entry
            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                                  'a')
        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
        self.convertfp.flush()
538 538
539 539 def revid(self, revnum, module=None):
540 540 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
541 541
542 542 def revnum(self, rev):
543 543 return int(rev.split('@')[-1])
544 544
    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if computed module does not
        belong to rootmodule subtree.
        """
        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if not stop:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat must run against the repository root, not the module
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        stream = self._getlog([path], stop, dirent.created_rev)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum <= dirent.created_rev:
                    break

                for p in paths:
                    if not path.startswith(p) or not paths[p].copyfrom_path:
                        continue
                    # follow the rename backwards
                    newpath = paths[p].copyfrom_path + path[len(p):]
                    self.ui.debug("branch renamed from %s to %s at %d\n" %
                                  (path, newpath, revnum))
                    path = newpath
                    break
        finally:
            stream.close()

        if not path.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None
        return self.revid(dirent.created_rev, path)
593 593
    def reparent(self, module):
        """Reparent the svn transport and return the previous parent."""
        if self.prevmodule == module:
            # already parented there; avoid a server round-trip
            return module
        svnurl = self.baseurl + urllib.quote(module)
        prevmodule = self.prevmodule
        if prevmodule is None:
            prevmodule = ''
        self.ui.debug("reparent to %s\n" % svnurl)
        svn.ra.reparent(self.ra, svnurl)
        self.prevmodule = module
        return prevmodule
606 606
    def expandpaths(self, rev, paths, parents):
        """Expand rev's changed-path entries into concrete file sets.

        Returns (changed, removed, copies): a list of changed files
        (removals included), the set of removed files, and a dict mapping
        copy destination to copy source.
        """
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # deleted directory: remove every file it contained
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        changed.update(removed)
        return (list(changed), removed, copies)
707 707
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Fetch the log between the two revnums (inclusive, any order)
        and populate self.commits/self.paths for the current module."""
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        # newest-seen changeset, used to link child parents backwards
        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                branch = self.module.split("/")[-1]
                if branch == self.trunkname:
                    # trunk maps to the (unnamed) default branch
                    branch = None
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
            raise
832 832
    def getfile(self, file, rev):
        """Return (data, mode) for file at rev; raise IOError if missing.

        mode is 'x' for executables, 'l' for symlinks, '' otherwise.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            raise IOError()
        mode = ''
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = StringIO()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_files() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitely.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        if mode == 'l':
            # svn stores symlink targets as "link <target>"
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
864 864
    def _iterfiles(self, path, revnum):
        """Enumerate all files in path at revnum, recursively."""
        path = path.strip('/')
        pool = Pool()
        rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
        entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
        if path:
            path += '/'
        # ls returns paths relative to rpath; re-prefix with path
        return ((path + p) for p, e in entries.iteritems()
                if e.kind == svn.core.svn_node_file)
875 875
876 876 def getrelpath(self, path, module=None):
877 877 if module is None:
878 878 module = self.module
879 879 # Given the repository url of this wc, say
880 880 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
881 881 # extract the "entry" portion (a relative path) from what
882 882 # svn log --xml says, ie
883 883 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
884 884 # that is to say "tests/PloneTestCase.py"
885 885 if path.startswith(module):
886 886 relative = path.rstrip('/')[len(module):]
887 887 if relative.startswith('/'):
888 888 return relative[1:]
889 889 elif relative == '':
890 890 return relative
891 891
892 892 # The path is outside our tracked tree...
893 893 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
894 894 return None
895 895
    def _checkpath(self, path, revnum, module=None):
        """Return the svn node kind of path at revnum via ra.check_path.

        When module is given, the lookup is performed with the session
        reparented to '' (presumably the repository root — the path is
        prefixed with module accordingly), and the previous parent is
        restored afterwards even if the query raises.
        """
        if module is not None:
            prevmodule = self.reparent('')
            path = module + '/' + path
        try:
            # ra.check_path does not like leading slashes very much, it leads
            # to PROPFIND subversion errors
            return svn.ra.check_path(self.ra, path.strip('/'), revnum)
        finally:
            if module is not None:
                self.reparent(prevmodule)
907 907
    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
                strict_node_history=False):
        """Stream svn log entries for paths between revisions start and end.

        The log call is delegated to a child "hg debugsvnlog" process:
        the arguments are serialized onto its stdin and the entries are
        read back lazily from its stdout via logstream().
        """
        # Normalize path names, svn >= 1.5 only wants paths relative to
        # supplied URL
        relpaths = []
        for p in paths:
            if not p.startswith('/'):
                p = self.module + '/' + p
            relpaths.append(p.strip('/'))
        args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
                strict_node_history]
        arg = encodeargs(args)
        hgexe = util.hgexecutable()
        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
        stdin, stdout = util.popen2(util.quotecommand(cmd))
        stdin.write(arg)
        try:
            # Closing stdin lets the child start processing; a failure here
            # means the child never came up (hg binary not runnable).
            stdin.close()
        except IOError:
            raise util.Abort(_('Mercurial failed to run itself, check'
                               ' hg executable is in PATH'))
        return logstream(stdout)
930 930
# pre-revprop-change hook installed into repositories freshly created by
# svn_sink.__init__: it permits exactly the revision-property changes the
# conversion needs to make (svn:log plus the hg:convert-branch and
# hg:convert-rev markers) and rejects everything else.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
946 946
class svn_sink(converter_sink, commandline):
    """Convert sink writing into a Subversion repository.

    The repository is driven through an "svn" working copy: files are
    written into the wc and committed with the command line client.  A
    repository is created with svnadmin when the target does not exist.

    Fix over the previous revision: tidy_dirs compared os.listdir()'s
    return value (a list) against the string '.svn', which is always
    False, so emptied directories were never deleted.
    """
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
    uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)

    def prerun(self):
        # All svn commands must run from inside the working copy.
        if self.wc:
            os.chdir(self.wc)

    def postrun(self):
        if self.wc:
            os.chdir(self.cwd)

    def join(self, name):
        # Conversion state files are kept inside the wc's .svn directory.
        return os.path.join(self.wc, '.svn', name)

    def revmapfile(self):
        return self.join('hg-shamap')

    def authorfile(self):
        return self.join('hg-authormap')

    def __init__(self, ui, path):

        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []     # files to "svn delete" at next putcommit
        self.setexec = []    # files gaining svn:executable
        self.delexec = []    # files losing svn:executable
        self.copies = []     # (source, dest) pairs to "svn copy"
        self.wc = None       # working copy directory, set below
        self.cwd = os.getcwd()

        path = util.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path is already a working copy; use it directly.
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    # No repository there yet: create one with svnadmin.
                    ui.status(_('initializing svn repository %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                # Turn the local path into a file:// URL for checkout.
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = scmutil.opener(self.wc)
        self.wopener = scmutil.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # On exec-challenged filesystems is_exec stays None and putfile
        # falls back to setting the bit from the source flags.
        self.is_exec = util.checkexec(self.wc) and util.isexec or None

        if created:
            # Allow ourselves to set the revprops putcommit needs.
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()

    def wjoin(self, *names):
        return os.path.join(self.wc, *names)

    def putfile(self, filename, flags, data):
        """Write one file into the working copy and queue property changes."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # Replacing a symlink with a regular file: drop the link
                # first so the write does not follow it.
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener.write(filename, data)

            if self.is_exec:
                was_exec = self.is_exec(self.wjoin(filename))
            else:
                # On filesystems not supporting execute-bit, there is no way
                # to know if it is set but asking subversion. Setting it
                # systematically is just as expensive and much simpler.
                was_exec = 'x' not in flags

            util.setflags(self.wjoin(filename), False, 'x' in flags)
            # Record svn:executable transitions for the next commit.
            if was_exec:
                if 'x' not in flags:
                    self.delexec.append(filename)
            else:
                if 'x' in flags:
                    self.setexec.append(filename)

    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.lexists(wdest)
        if exists:
            # Move the already-written destination out of the way...
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            if exists:
                # ...and put it back over whatever "svn copy" produced.
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)

    def dirs_of(self, files):
        """Return the set of directories containing or named by files."""
        dirs = set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            # Every ancestor directory of f, via the '/' positions.
            for i in strutil.rfindall(f, '/'):
                dirs.add(f[:i])
        return dirs

    def add_dirs(self, files):
        """svn add any not-yet-versioned directories of files; return them."""
        add_dirs = [d for d in sorted(self.dirs_of(files))
                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
        if add_dirs:
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs

    def add_files(self, files):
        """svn add the given files; return them."""
        if files:
            self.xargs(files, 'add', quiet=True)
        return files

    def tidy_dirs(self, names):
        """Delete directories left empty (bottom-up); return those deleted."""
        deleted = []
        for d in sorted(self.dirs_of(names), reverse=True):
            wd = self.wjoin(d)
            # os.listdir returns a list; a directory holding nothing but
            # its .svn metadata is empty from Subversion's point of view.
            # (The previous comparison against the bare string '.svn' was
            # always False, so empty directories were never pruned.)
            if os.listdir(wd) == ['.svn']:
                self.run0('delete', d)
                deleted.append(d)
        return deleted

    def addchild(self, parent, child):
        self.childmap[parent] = child

    def revid(self, rev):
        return u"svn:%s@%s" % (self.uuid, rev)

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Write one changeset into the wc and commit it; return its revid."""
        # Apply changes to working copy
        for f, v in files:
            try:
                data, mode = source.getfile(f, v)
            except IOError:
                # getfile raising IOError means the file was removed.
                self.delete.append(f)
            else:
                self.putfile(f, mode, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        files = [f[0] for f in files]

        # If a parent was already converted, reuse its svn revision.
        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass
        entries = set(self.delete)
        files = frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # Pass the commit message through a temporary file to avoid any
        # shell quoting/encoding issues.
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                # No "Committed revision N." in the output: nothing was
                # committed. That is fine for an empty changeset.
                if not files:
                    return parents[0]
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            # Record source metadata as revision properties (allowed by
            # the pre-revprop-change hook installed at creation time).
            if commit.rev:
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)

    def puttags(self, tags):
        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
        return None, None
@@ -1,5643 +1,5643
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, difflib, time, tempfile, errno
12 12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 13 import patch, help, url, encoding, templatekw, discovery
14 14 import archival, changegroup, cmdutil, hbisect
15 15 import sshserver, hgweb, hgweb.server, commandserver
16 16 import merge as mergemod
17 17 import minirst, revset, fileset
18 18 import dagparser, context, simplemerge
19 19 import random, setdiscovery, treediscovery, dagutil
20 20
# Command table populated by the @command decorator below; keys are command
# names, values the (function, options, synopsis) entries dispatch uses.
table = {}

command = cmdutil.command(table)

# common command options

# Options accepted by every command (handled by the dispatcher).
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# Options shared by commands that talk to a remote repository.
remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

# Options shared by commands that walk the working directory.
walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

# Options shared by commands that create a commit.
commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# Options shared by commands with templated output.
templateopts = [
    ('', 'style', '',
     _('display using template map file'), _('STYLE')),
    ('', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

# Options shared by log-like commands.
logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
] + templateopts

# Options shared by diff-producing commands.
diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

# Commands start here, listed alphabetically
137 137
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'))
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository.

    .. container:: verbose

       An example showing how new (unknown) files are added
       automatically by :hg:`add`::

         $ ls
         foo.c
         $ hg status
         ? foo.c
         $ hg add
         adding foo.c
         $ hg status
         A foo.c

    Returns 0 if all files are successfully added.
    """

    # cmdutil.add returns the names it could not add; an empty list
    # means success.
    matcher = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.add(ui, repo, matcher, opts.get('dry_run'),
                           opts.get('subrepos'), prefix="")
    if rejected:
        return 1
    return 0
173 173
@command('addremove',
    similarityopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'))
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    ``.hgignore``. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. With a
    parameter greater than 0, this compares every removed file with
    every added file and records those similar enough as renames. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed.

    Returns 0 if all files are successfully added.
    """
    # An empty --similarity means "use the default of 100"; note that
    # the string '0' is truthy and therefore a valid explicit value.
    rawsim = opts.get('similarity') or 100
    try:
        sim = float(rawsim)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
204 204
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + walkopts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    # One (option name, column separator, formatter) triple per possible
    # output column; formatters receive an (fctx, lineno) pair.
    opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
             ('number', ' ', lambda x: str(x[0].rev())),
             ('changeset', ' ', lambda x: short(x[0].node())),
             ('date', ' ', getdate),
             ('file', ' ', lambda x: x[0].path()),
             ('line_number', ':', lambda x: str(x[1])),
            ]

    # With no column selected, default to showing the revision number.
    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column

    def bad(x, y):
        # Abort instead of just warning on bad patterns.
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber)
        pieces = []

        # Render each selected column, right-aligned to its widest value.
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                sized = [(x, encoding.colwidth(x)) for x in l]
                ml = max([w for x, w in sized])
                pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
                               for x, w in sized])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % ("".join(p), l[1]))
292 292
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    # Expand format-string placeholders (%h etc.) in the destination name.
    dest = cmdutil.makefilename(repo, dest, node)
    # Refuse to clobber the repository itself (compare resolved paths).
    if util.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # '-' means "write the archive to stdout"; a files tree cannot
        # be streamed, and the prefix then defaults to the repo name.
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))
364 364
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::
      backout cannot be used to fix either an unwanted or
      incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''
    # REV may arrive positionally (node) or via -r (rev); accept exactly one.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    # The target must be an ancestor of the working directory parent.
    op1, op2 = repo.dirstate.parents()
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot backout change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # Backing out a merge requires --parent to pick which side to keep.
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
    # Revert the working directory to the chosen parent to obtain the
    # "undone" state, then commit it below.
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    if not opts.get('merge') and op1 != node:
        # Without --merge: leave the backout uncommitted and move the
        # working directory back to its original parent.
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
            return hg.update(repo, op1)
        finally:
            ui.setconfig('ui', 'forcemerge', '')

    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        # we don't translate commit messages
        commit_opts['message'] = "Backed out changeset %s" % short(node)
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        # With --merge: merge the backout changeset with the previous
        # working directory parent.
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '')
    return 0
477 477
478 478 @command('bisect',
479 479 [('r', 'reset', False, _('reset bisect state')),
480 480 ('g', 'good', False, _('mark changeset good')),
481 481 ('b', 'bad', False, _('mark changeset bad')),
482 482 ('s', 'skip', False, _('skip testing changeset')),
483 483 ('e', 'extend', False, _('extend the bisect range')),
484 484 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
485 485 ('U', 'noupdate', False, _('do not update to target'))],
486 486 _("[-gbsr] [-U] [-c CMD] [REV]"))
487 487 def bisect(ui, repo, rev=None, extra=None, command=None,
488 488 reset=None, good=None, bad=None, skip=None, extend=None,
489 489 noupdate=None):
490 490 """subdivision search of changesets
491 491
492 492 This command helps to find changesets which introduce problems. To
493 493 use, mark the earliest changeset you know exhibits the problem as
494 494 bad, then mark the latest changeset which is free from the problem
495 495 as good. Bisect will update your working directory to a revision
496 496 for testing (unless the -U/--noupdate option is specified). Once
497 497 you have performed tests, mark the working directory as good or
498 498 bad, and bisect will either update to another candidate changeset
499 499 or announce that it has found the bad revision.
500 500
501 501 As a shortcut, you can also use the revision argument to mark a
502 502 revision as good or bad without checking it out first.
503 503
504 504 If you supply a command, it will be used for automatic bisection.
505 505 Its exit status will be used to mark revisions as good or bad:
506 506 status 0 means good, 125 means to skip the revision, 127
507 507 (command not found) will abort the bisection, and any other
508 508 non-zero exit status means the revision is bad.
509 509
510 510 .. container:: verbose
511 511
512 512 Some examples:
513 513
514 514 - start a bisection with known bad revision 12, and good revision 34::
515 515
516 516 hg bisect --bad 34
517 517 hg bisect --good 12
518 518
519 519 - advance the current bisection by marking current revision as good or
520 520 bad::
521 521
522 522 hg bisect --good
523 523 hg bisect --bad
524 524
525 525 - mark the current revision, or a known revision, to be skipped (eg. if
526 526 that revision is not usable because of another issue)::
527 527
528 528 hg bisect --skip
529 529 hg bisect --skip 23
530 530
531 531 - forget the current bisection::
532 532
533 533 hg bisect --reset
534 534
535 535 - use 'make && make tests' to automatically find the first broken
536 536 revision::
537 537
538 538 hg bisect --reset
539 539 hg bisect --bad 34
540 540 hg bisect --good 12
541 541 hg bisect --command 'make && make tests'
542 542
543 543 - see all changesets whose states are already known in the current
544 544 bisection::
545 545
546 546 hg log -r "bisect(pruned)"
547 547
548 548 - see all changesets that took part in the current bisection::
549 549
550 550 hg log -r "bisect(range)"
551 551
552 552 - with the graphlog extension, you can even get a nice graph::
553 553
554 554 hg log --graph -r "bisect(range)"
555 555
556 556 See :hg:`help revsets` for more about the `bisect()` keyword.
557 557
558 558 Returns 0 on success.
559 559 """
560 560 def extendbisectrange(nodes, good):
561 561 # bisect is incomplete when it ends on a merge node and
562 562 # one of the parent was not checked.
563 563 parents = repo[nodes[0]].parents()
564 564 if len(parents) > 1:
565 565 side = good and state['bad'] or state['good']
566 566 num = len(set(i.node() for i in parents) & set(side))
567 567 if num == 1:
568 568 return parents[0].ancestor(parents[1])
569 569 return None
570 570
571 571 def print_result(nodes, good):
572 572 displayer = cmdutil.show_changeset(ui, repo, {})
573 573 if len(nodes) == 1:
574 574 # narrowed it down to a single revision
575 575 if good:
576 576 ui.write(_("The first good revision is:\n"))
577 577 else:
578 578 ui.write(_("The first bad revision is:\n"))
579 579 displayer.show(repo[nodes[0]])
580 580 extendnode = extendbisectrange(nodes, good)
581 581 if extendnode is not None:
582 582 ui.write(_('Not all ancestors of this changeset have been'
583 583 ' checked.\nUse bisect --extend to continue the '
584 584 'bisection from\nthe common ancestor, %s.\n')
585 585 % extendnode)
586 586 else:
587 587 # multiple possible revisions
588 588 if good:
589 589 ui.write(_("Due to skipped revisions, the first "
590 590 "good revision could be any of:\n"))
591 591 else:
592 592 ui.write(_("Due to skipped revisions, the first "
593 593 "bad revision could be any of:\n"))
594 594 for n in nodes:
595 595 displayer.show(repo[n])
596 596 displayer.close()
597 597
598 598 def check_state(state, interactive=True):
599 599 if not state['good'] or not state['bad']:
600 600 if (good or bad or skip or reset) and interactive:
601 601 return
602 602 if not state['good']:
603 603 raise util.Abort(_('cannot bisect (no known good revisions)'))
604 604 else:
605 605 raise util.Abort(_('cannot bisect (no known bad revisions)'))
606 606 return True
607 607
608 608 # backward compatibility
609 609 if rev in "good bad reset init".split():
610 610 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
611 611 cmd, rev, extra = rev, extra, None
612 612 if cmd == "good":
613 613 good = True
614 614 elif cmd == "bad":
615 615 bad = True
616 616 else:
617 617 reset = True
618 618 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
619 619 raise util.Abort(_('incompatible arguments'))
620 620
621 621 if reset:
622 622 p = repo.join("bisect.state")
623 623 if os.path.exists(p):
624 624 os.unlink(p)
625 625 return
626 626
627 627 state = hbisect.load_state(repo)
628 628
629 629 if command:
630 630 changesets = 1
631 631 try:
632 632 while changesets:
633 633 # update state
634 634 status = util.system(command, out=ui.fout)
635 635 if status == 125:
636 636 transition = "skip"
637 637 elif status == 0:
638 638 transition = "good"
639 639 # status < 0 means process was killed
640 640 elif status == 127:
641 641 raise util.Abort(_("failed to execute %s") % command)
642 642 elif status < 0:
643 643 raise util.Abort(_("%s killed") % command)
644 644 else:
645 645 transition = "bad"
646 646 ctx = scmutil.revsingle(repo, rev)
647 647 rev = None # clear for future iterations
648 648 state[transition].append(ctx.node())
649 649 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
650 650 check_state(state, interactive=False)
651 651 # bisect
652 652 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
653 653 # update to next check
654 654 cmdutil.bailifchanged(repo)
655 655 hg.clean(repo, nodes[0], show_stats=False)
656 656 finally:
657 657 hbisect.save_state(repo, state)
658 658 print_result(nodes, good)
659 659 return
660 660
661 661 # update state
662 662
663 663 if rev:
664 664 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
665 665 else:
666 666 nodes = [repo.lookup('.')]
667 667
668 668 if good or bad or skip:
669 669 if good:
670 670 state['good'] += nodes
671 671 elif bad:
672 672 state['bad'] += nodes
673 673 elif skip:
674 674 state['skip'] += nodes
675 675 hbisect.save_state(repo, state)
676 676
677 677 if not check_state(state):
678 678 return
679 679
680 680 # actually bisect
681 681 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
682 682 if extend:
683 683 if not changesets:
684 684 extendnode = extendbisectrange(nodes, good)
685 685 if extendnode is not None:
686 686 ui.write(_("Extending search to changeset %d:%s\n"
687 687 % (extendnode.rev(), extendnode)))
688 688 if noupdate:
689 689 return
690 690 cmdutil.bailifchanged(repo)
691 691 return hg.clean(repo, extendnode.node())
692 692 raise util.Abort(_("nothing to extend"))
693 693
694 694 if changesets == 0:
695 695 print_result(nodes, good)
696 696 else:
697 697 assert len(nodes) == 1 # only a single node can be tested next
698 698 node = nodes[0]
699 699 # compute the approximate number of remaining tests
700 700 tests, size = 0, 2
701 701 while size <= changesets:
702 702 tests, size = tests + 1, size * 2
703 703 rev = repo.changelog.rev(node)
704 704 ui.write(_("Testing changeset %d:%s "
705 705 "(%d changesets remaining, ~%d tests)\n")
706 706 % (rev, short(node), changesets, tests))
707 707 if not noupdate:
708 708 cmdutil.bailifchanged(repo)
709 709 return hg.clean(repo, node)
710 710
@command('bookmarks',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
    ('i', 'inactive', False, _('do not mark a new bookmark active'))],
    _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
             rename=None, inactive=False):
    '''track a line of development with movable markers

    Bookmarks are pointers to certain commits that move when
    committing. Bookmarks are local. They can be renamed, copied and
    deleted. It is possible to use bookmark names in :hg:`merge` and
    :hg:`update` to merge and update respectively to a given bookmark.

    You can use :hg:`bookmark NAME` to set a bookmark on the working
    directory's parent revision with the given name. If you specify
    a revision using -r REV (where REV may be an existing bookmark),
    the bookmark is assigned to that revision.

    Bookmarks can be pushed and pulled between repositories (see :hg:`help
    push` and :hg:`help pull`). This requires both the local and remote
    repositories to support bookmarks. For versions prior to 1.8, this means
    the bookmarks extension must be enabled.
    '''
    # show short hashes unless --debug is in effect
    hexfn = ui.debugflag and hex or short
    marks = repo._bookmarks
    cur = repo.changectx('.').node()

    # --rename: move an existing bookmark to a new name
    if rename:
        if rename not in marks:
            raise util.Abort(_("bookmark '%s' does not exist") % rename)
        if mark in marks and not force:
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if mark is None:
            raise util.Abort(_("new bookmark name required"))
        marks[mark] = marks[rename]
        # keep the bookmark active under its new name if it was current
        if repo._bookmarkcurrent == rename and not inactive:
            bookmarks.setcurrent(repo, mark)
        del marks[rename]
        bookmarks.write(repo)
        return

    # --delete: remove a bookmark, deactivating it first if necessary
    if delete:
        if mark is None:
            raise util.Abort(_("bookmark name required"))
        if mark not in marks:
            raise util.Abort(_("bookmark '%s' does not exist") % mark)
        if mark == repo._bookmarkcurrent:
            bookmarks.setcurrent(repo, None)
        del marks[mark]
        bookmarks.write(repo)
        return

    # a NAME argument was given: create or move a bookmark
    if mark is not None:
        if "\n" in mark:
            raise util.Abort(_("bookmark name cannot contain newlines"))
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        # -i on the currently active bookmark just deactivates it
        if inactive and mark == repo._bookmarkcurrent:
            bookmarks.setcurrent(repo, None)
            return
        if mark in marks and not force:
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))
        # point the bookmark at -r REV, or at the working directory parent
        if rev:
            marks[mark] = repo.lookup(rev)
        else:
            marks[mark] = repo.changectx('.').node()
        # only activate the bookmark when it sits on the working directory
        # parent
        if not inactive and repo.changectx('.').node() == marks[mark]:
            bookmarks.setcurrent(repo, mark)
        bookmarks.write(repo)
        return

    # no arguments: list all bookmarks
    if mark is None:
        if rev:
            raise util.Abort(_("bookmark name required"))
        if len(marks) == 0:
            ui.status(_("no bookmarks set\n"))
        else:
            for bmark, n in sorted(marks.iteritems()):
                current = repo._bookmarkcurrent
                # flag the active bookmark with '*' and a highlight label
                if bmark == current and n == cur:
                    prefix, label = '*', 'bookmarks.current'
                else:
                    prefix, label = ' ', ''

                if ui.quiet:
                    ui.write("%s\n" % bmark, label=label)
                else:
                    ui.write(" %s %-25s %d:%s\n" % (
                        prefix, bmark, repo.changelog.rev(n), hexfn(n)),
                        label=label)
        return
813 813
@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists, even if it's inactive.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch as closed.

    .. note::
       Branch names are permanent. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    Returns 0 on success.
    """

    # -C/--clean wins over any NAME argument: revert to the parent's branch
    if opts.get('clean'):
        label = repo[None].p1().branch()
        repo.dirstate.setbranch(label)
        ui.status(_('reset working directory to branch %s\n') % label)
        return

    # no name given: just report the current branch
    if not label:
        ui.write("%s\n" % repo.dirstate.branch())
        return

    # refuse to shadow an existing branch unless --force, but always
    # allow re-selecting a branch of one of the working dir's parents
    if not opts.get('force') and label in repo.branchtags():
        parentbranches = [p.branch() for p in repo.parents()]
        if label not in parentbranches:
            raise util.Abort(_('a branch of the same name already exists'),
                             # i18n: "it" refers to an existing branch
                             hint=_("use 'hg update' to switch to it"))
    repo.dirstate.setbranch(label)
    ui.status(_('marked working directory as branch %s\n') % label)
860 860
@command('branches',
    [('a', 'active', False, _('show only branches that have unmerged heads')),
    ('c', 'closed', False, _('show normal and closed branches'))],
    _('[-ac]'))
def branches(ui, repo, active=False, closed=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    If -a/--active is specified, only show active branches. A branch
    is considered active if it contains repository heads.

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    # show short hashes unless --debug is in effect
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short

    headbranches = [repo[h].branch() for h in repo.heads()]

    def isbranchactive(name, n):
        # a branch is active when it is named on a repository head and
        # still has an open branch head
        return (name in headbranches
                and n in repo.branchheads(name, closed=False))

    # sort active branches first, then by descending revision
    entries = [(isbranchactive(name, n), repo.changelog.rev(n), name)
               for name, n in repo.branchtags().items()]
    entries.sort(reverse=True)

    for isactive, node, tag in entries:
        if active and not isactive:
            continue
        if ui.quiet:
            ui.write("%s\n" % tag)
            continue
        hn = repo.lookup(node)
        if isactive:
            label = 'branches.active'
            notice = ''
        elif hn not in repo.branchheads(tag, closed=False):
            # branch has no open heads left: closed
            if not closed:
                continue
            label = 'branches.closed'
            notice = _(' (closed)')
        else:
            label = 'branches.inactive'
            notice = _(' (inactive)')
        if tag == repo.dirstate.branch():
            label = 'branches.current'
        # pad so the rev:hash column lines up at a fixed width
        rev = str(node).rjust(31 - encoding.colwidth(tag))
        rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
        ui.write("%s %s%s\n" % (ui.label(tag, label), rev, notice))
913 913
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    revs = None
    if 'rev' in opts:
        revs = scmutil.revrange(repo, opts['rev'])

    # --all is shorthand for --base null
    if opts.get('all'):
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    if base:
        # with --base the receiver's contents are asserted by the user,
        # so no remote discovery happens and a destination makes no sense
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        common = [repo.lookup(rev) for rev in base]
        # resolve revs to nodes; preserves None/[] (meaning "all heads")
        heads = revs and map(repo.lookup, revs) or revs
    else:
        # no --base: contact the destination and discover what's missing
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        common, outheads = discovery.findcommonoutgoing(repo, other,
                                                        onlyheads=heads,
                                                        force=opts.get('force'))

    cg = repo.getbundle('bundle', common=common, heads=heads)
    if not cg:
        ui.status(_("no changes found\n"))
        return 1

    # map the user-visible compression name to the internal bundle type
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
988 988
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'))
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed

    Returns 0 on success.
    """
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    decode = opts.get('decode')
    # exit code 1 unless at least one file matched and was written
    ret = 1
    for path in ctx.walk(matcher):
        out = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
                                  pathname=path)
        data = ctx[path].data()
        if decode:
            data = repo.wwritedata(path, data)
        out.write(data)
        out.close()
        ret = 0
    return ret
1026 1026
@command('^clone',
    [('U', 'noupdate', None,
     _('the clone will include an empty working copy (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'))
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories. Note that specifying a
    tag will include the tagged changeset but not the changeset
    containing the tag.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the tipmost head of the default branch
      h) tip

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone http://selenic.com/hg

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone http://selenic.com/hg#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    noupdate = opts.get('noupdate')
    updaterev = opts.get('updaterev')
    if noupdate and updaterev:
        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=updaterev or not noupdate,
                      branch=opts.get('branch'))

    # hg.clone's return value is truthy on success here; map it to a
    # shell-style exit status (True -> 1 means failure)
    return result is None
1140 1140
@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch as closed, hiding it from the branch list')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'))
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.
    """
    if opts.get('subrepos'):
        # Let --subrepos on the command line override the config setting.
        ui.setconfig('ui', 'commitsubrepos', True)

    extra = {}
    if opts.get('close_branch'):
        if repo['.'].node() not in repo.branchheads():
            # The topo heads set is included in the branch heads set of the
            # current branch, so it's sufficient to test branchheads
            raise util.Abort(_('can only close branch heads'))
        extra['close'] = 1
    # choose the editor callback; 'force_editor' may be set by wrappers
    e = cmdutil.commiteditor
    if opts.get('force_editor'):
        e = cmdutil.commitforceeditor

    def commitfunc(ui, repo, message, match, opts):
        # closure handed to cmdutil.commit; carries editor/extra along
        return repo.commit(message, opts.get('user'), opts.get('date'), match,
                           editor=e, extra=extra)

    # capture the branch heads *before* committing so that the new-head
    # detection below compares against the pre-commit state
    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        # nothing was committed; mention missing (deleted) files if any
        stat = repo.status(match=scmutil.match(repo[None], pats, opts))
        if stat[3]:
            ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
                      % len(stat[3]))
        else:
            ui.status(_("nothing changed\n"))
        return 1

    ctx = repo[node]
    parents = ctx.parents()

    # new head: old heads still exist, the new node is not among them, and
    # no parent of the new node was a head of the same branch
    if (bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        #  NN  y  additional topo root
        #
        #  BN  y  additional branch root
        #  CN  y  additional topo head
        #  HN  n  usual case
        #
        #  BB  y  weird additional branch root
        #  CB  y  branch merge
        #  HB  n  merge with named branch
        #
        #  CC  y  additional head from merge
        #  CH  n  merge with a head
        #
        #  HH  n  head merge: head count decreases

    if not opts.get('close_branch'):
        # committing on top of a closed head reopens the branch
        for r in parents:
            if r.extra().get('close') and r.branch() == branch:
                ui.status(_('reopening closed branch head %d\n') % r)

    if ui.debugflag:
        ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1246 1246
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # all real work happens in cmdutil.copy; we only hold the working
    # directory lock around it
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        lock.release()
1273 1273
@command('debugancestor', [], _('[INDEX] REV1 REV2'))
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit revlog index file was given: open it directly,
        # bypassing any repository
        index, rev1, rev2 = args
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # no index given: fall back to the current repository's changelog
        if not repo:
            raise util.Abort(_("there is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(ancestor), hex(ancestor)))
1292 1292
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # refuse to run on a non-empty repo: revision numbers produced below
    # must match the DAG's own numbering, which starts at 0
    cl = repo.changelog
    if len(cl) > 0:
        raise util.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass; used only for
    # progress reporting and to size the mergeable file)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    # all commits happen inside one transaction so a parse error rolls
    # everything back
    tr = repo.transaction("builddag")
    try:

        at = -1                  # rev number of the last node committed
        atbranch = 'default'     # named branch for subsequent commits
        nodeids = []             # maps DAG rev number -> node id
        ui.progress(_('building'), 0, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' events carry (rev number, list of parent revs)
                ui.note('node %s\n' % str(data))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file so the
                        # result exercises real merge machinery
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in pa, p1, p2]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # touch this rev's own line so every rev changes the file
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(fn, mergedtext)

                if overwritten_file:
                    # single file rewritten wholesale by every rev
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)

                if new_file:
                    # brand-new file per rev; merges must also carry over
                    # the second parent's "nf*" files explicitly
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' events attach a local tag to an already-committed rev
                id, name = data
                ui.note('tag %s\n' % name)
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note('branch %s\n' % data)
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()
    finally:
        ui.progress(_('building'), None)
        tr.release()

    if tags:
        # local tags live in .hg/localtags, not in history
        repo.opener.write("localtags", "".join(tags))
1439 1439
@command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = url.open(ui, bundlepath)
    try:
        gen = changegroup.readbundle(f, bundlepath)
        if all:
            # verbose mode: dump every delta chunk of every group
            # (changelog, manifest, then one group per filelog)
            ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")

            def showchunks(named):
                # drain one changegroup group; deltachunk() must be fed
                # the previous node ('chain') to walk the delta sequence
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            # terse mode: just the changelog node ids
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()
1489 1489
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Cross-check every dirstate entry against the manifests of both
    # working-directory parents, then check manifest1 back against the
    # dirstate. Abort if any inconsistency was reported.
    dirstate = repo.dirstate
    p1, p2 = dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errors = 0
    for f in dirstate:
        state = dirstate[f]
        if f not in m1 and state in "nr":
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if f in m1 and state in "a":
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        raise util.Abort(
            _(".hg/dirstate inconsistent with current parent's manifest"))
1517 1517
@command('debugcommands', [], _('[COMMAND]'))
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # One line per table entry: canonical command name, then the long
    # names of all of its options.
    for name, entry in sorted(table.iteritems()):
        canonical = name.split('|')[0].strip('^')
        longopts = ', '.join(o[1] for o in entry[1])
        ui.write('%s: %s\n' % (canonical, longopts))
1525 1525
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'))
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # -o: complete option flags instead of command names; global
        # options plus, when a command was named, its own option table
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            tables.append(entry[1])
        flags = []
        for opttable in tables:
            for o in opttable:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    flags.append('-%s' % o[0])
                flags.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # default: complete command names matching the given prefix
    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1552 1552
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'))
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labelled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: open it outside any repo
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            # emit 'n' (node) events for every rev, and an 'l' (label)
            # event for each rev the user listed on the command line
            for r in rlog:
                yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the named
                    # branch changes; field 5 of the changelog entry is
                    # the extra dict
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
1611 1611
@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev = None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m the single positional argument is the revision, not a
    # file name, so shuffle it over.
    if not (opts.get('changelog') or opts.get('manifest')):
        if rev is None:
            raise error.CommandError('debugdata', _('invalid arguments'))
    else:
        file_, rev = None, file_
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
1627 1627
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'))
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # parse with the extended format list only when -e was given
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
1642 1642
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads):
        # run one discovery round and report common heads and
        # subset relationships between local and remote head sets
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style discovery')
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # reduce the common set to its heads via the internal DAG
                ui.write("unpruned common: %s\n" % " ".join([short(n)
                                                            for n in common]))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
        if lheads <= common:
            ui.write("local is subset\n")
        elif rheads <= common:
            ui.write("remote is subset\n")

    # NOTE(review): 'serverlog' has no entry in this command's option
    # table, so opts.get() normally returns None and the replay branch
    # below is unreachable from the CLI -- confirm whether the option
    # was meant to be declared.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in server log files; each
        # line is semicolon-separated, field 1 is the operation code
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
1706 1706
@command('debugfileset', [], ('REVSPEC'))
def debugfileset(ui, repo, expr):
    '''parse and apply a fileset specification'''
    # in verbose mode, show the parse tree before evaluating
    if ui.verbose:
        ui.note(fileset.parse(expr)[0], "\n")

    for filename in fileset.getfileset(repo[None], expr):
        ui.write("%s\n" % filename)
1716 1716
@command('debugfsinfo', [], _('[PATH]'))
def debugfsinfo(ui, path = "."):
    """show information detected about current filesystem"""
    # Create the probe file used by the case-sensitivity check; remove it
    # in a finally block so it is not leaked when a check raises (the
    # original code left '.debugfsinfo' behind on any exception).
    util.writefile('.debugfsinfo', '')
    try:
        ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
        ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
        ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                           and 'yes' or 'no'))
    finally:
        os.unlink('.debugfsinfo')
1726 1726
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'))
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    # wire-protocol arguments: binary node lists for common and heads
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle('debug', **args)

    # map the user-visible compression name to the on-disk bundle header
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(bundle, bundlepath, bundletype)
1754 1754
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    # the dirstate's ignore matcher exposes the combined pattern via an
    # 'includepat' attribute when any ignore patterns are configured
    includepat = getattr(repo.dirstate._ignore, 'includepat', None)
    if includepat is None:
        raise util.Abort(_("no ignore patterns found"))
    ui.write("%s\n" % includepat)
1764 1764
@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'))
def debugindex(ui, repo, file_ = None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    # the "base" column shows the delta parent for generaldelta revlogs,
    # and the chain base otherwise
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if format == 0:
        ui.write("   rev offset length " + basehdr + " linkrev"
                 " nodeid       p1           p2\n")
    elif format == 1:
        ui.write("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2       nodeid\n")

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit; fall back to null parents
                # only on ordinary lookup failures
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], short(node)))
1809 1809
@command('debugindexdot', [], _('FILE'))
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    # Prefer the repository's own filelog for the path; fall back to
    # opening the path as a raw revlog when there is no repo or the
    # filelog is empty.
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if r is None:
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    # emit one "parent -> child" edge per parent link
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
1828 1828
@command('debuginstall', [], '')
def debuginstall(ui):
    '''test Mercurial installation

    Runs a series of independent sanity checks (encoding, compiled
    modules, templates, editor, username) and reports each problem
    found.

    Returns 0 on success.
    '''

    def writetemp(contents):
        # write contents to a fresh temp file and return its name
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # count of problems found; doubles as the return/exit value
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    import templater
    p = templater.templatepath()
    ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
    try:
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    # the editor setting may include arguments; try the full string
    # first, then just the executable name
    cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
            problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
1908 1908
@command('debugknown', [], _('REPO ID...'))
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
    indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise util.Abort("known() not supported by target repository")
    # one "1"/"0" per requested id, in order
    bits = []
    for known in peer.known([bin(s) for s in ids]):
        bits.append(known and "1" or "0")
    ui.write("%s\n" % "".join(bits))
1921 1921
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair, escaped for display
        for k, v in target.listkeys(namespace).iteritems():
            ui.write("%s\t%s\n" % (k.encode('string-escape'),
                                   v.encode('string-escape')))
        return
    # update mode: push the new value; exit status reflects failure
    key, old, new = keyinfo
    r = target.pushkey(namespace, key, old, new)
    ui.status(str(r) + '\n')
    return not r
1942 1942
@command('debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV] [REV]'))
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    target = scmutil.revsingle(repo, rev)
    # hold the working-directory lock while rewriting the dirstate
    lock = repo.wlock()
    try:
        repo.dirstate.rebuild(target.node(), target.manifest())
    finally:
        lock.release()
1954 1954
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        rel = matcher.rel(path)
        # renamed() yields (source path, source filenode) or False
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (rel, renamed[0], hex(renamed[1])))
1971 1971
@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'))
def debugrevlog(ui, repo, file_ = None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # -d: raw per-revision dump instead of aggregate statistics
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads\n")
        ts = 0
        heads = set()
        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # track the current head set incrementally: each rev removes
            # its parents from the set and adds itself
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            # NOTE(review): r.end(rev) can be 0 for an empty first rev,
            # which would make the compression column divide by zero --
            # confirm against real revlogs
            ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads)))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # aggregate counters for the statistics report below
    nummerges = 0
    numfull = 0     # revisions stored as full snapshots
    numprev = 0     # deltas against the previous revision
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # each size accumulator is [min, max, total]; total becomes the
    # average after the division below
    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        # fold one sample into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            # classify the delta parent: previous rev (and which parent
            # it happens to be), p1, p2, or something else
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    # NOTE(review): the divisions below raise ZeroDivisionError when
    # numrevs, numfull, numrevs - numfull, or totalsize is 0 (empty
    # revlog, all-full or all-delta revlogs) -- confirm intended inputs
    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal column wide enough for 'max'
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # decimal column plus a percentage, optionally padded
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        return (value, 100 * float(value) / total)

    ui.write('format : %d\n' % format)
    ui.write('flags : %s\n' % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write('revisions : ' + fmt2 % numrevs)
    ui.write(' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.write(' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write('revisions : ' + fmt2 % numrevs)
    ui.write(' full : ' + fmt % pcfmt(numfull, numrevs))
    ui.write(' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.write('revision size : ' + fmt2 % totalsize)
    ui.write(' full : ' + fmt % pcfmt(fulltotal, totalsize))
    ui.write(' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write('avg chain length : ' + fmt % avgchainlen)
    ui.write('compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
                 % tuple(datasize))
        ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
                 % tuple(fullsize))
        ui.write('delta size (min/max/avg) : %d / %d / %d\n'
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev))
            ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev))
            ui.write(' other : ' + fmt2 % pcfmt(numoprev, numprev))
        if gdelta:
            ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
            ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
            ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
2132 2132
@command('debugrevspec', [], ('REVSPEC'))
def debugrevspec(ui, repo, expr):
    '''parse and apply a revision specification'''
    if ui.verbose:
        # show the parse tree, and the alias-expanded tree when it differs
        tree = revset.parse(expr)[0]
        ui.note(tree, "\n")
        expanded = revset.findaliases(ui, tree)
        if expanded != tree:
            ui.note(expanded, "\n")
    matcher = revset.match(ui, expr)
    for rev in matcher(repo, range(len(repo))):
        ui.write("%s\n" % rev)
2145 2145
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.

    Returns 0 on success.
    """

    # resolve both revisions before taking the lock; rev2 defaults to null
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    lock = repo.wlock()
    try:
        repo.dirstate.setparents(node1, node2)
    finally:
        lock.release()
2164 2164
@command('debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each dirstate entry is (state, mode, size, mtime)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if showdate:
            if ent[3] == -1:
                # mtime of -1 means "unset"; pad or slice the marker to
                # the width of the locale's timestamp representation so
                # columns stay aligned
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
                                               time.localtime(0)))
                timestr = 'unset'
                timestr = (timestr[:locale_len] +
                           ' ' * (locale_len - len(timestr)))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                        time.localtime(ent[3]))
        # 020000 is the symlink bit in the stored mode (Python 2 octal)
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2196 2196
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # dump the subrepo state (path -> (source, revision)) of a changeset
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write('path %s\n' % path)
        ui.write(' source %s\n' % state[0])
        ui.write(' revision %s\n' % state[1])
2207 2207
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, opts)
    walked = list(repo.walk(matcher))
    if not walked:
        return
    # size each column to its widest entry so the output lines up
    abswidth = max(len(path) for path in walked)
    relwidth = max(len(matcher.rel(path)) for path in walked)
    fmt = 'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for path in walked:
        exact = matcher.exact(path) and 'exact' or ''
        ui.write("%s\n" % (fmt % (path, matcher.rel(path), exact)).rstrip())
2221 2221
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'))
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    # drop the generic remote options; only the wire arguments remain
    for remoteopt in remoteopts:
        del opts[remoteopt[1]]
    # forward only the options that were actually set
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2242 2242
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::
       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353 # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways of picking endpoints
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        # one changeset against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    diffopts = patch.diffopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m,
                           stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'))
2326 2326
@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
     ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] REV...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::
       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # positional REVs and -r/--rev are interchangeable
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = scmutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs,
                   template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffopts(ui, opts))
2403 2403
@command('^forget', walkopts, _('[OPTION]... FILE...'))
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    # modified, added, deleted and clean files are all candidates
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    errs = 0

    # warn about explicitly named files that are not tracked
    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            if os.path.exists(m.rel(f)):
                ui.warn(_('not removing %s: file is already untracked\n')
                        % m.rel(f))
            errs = 1

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo[None].forget(forget)
    return errs
2453 2453
2454 2454 @command(
2455 2455 'graft',
2456 2456 [('c', 'continue', False, _('resume interrupted graft')),
2457 2457 ('e', 'edit', False, _('invoke editor on commit messages')),
2458 2458 ('D', 'currentdate', False,
2459 2459 _('record the current date as commit date')),
2460 2460 ('U', 'currentuser', False,
2461 2461 _('record the current user as committer'), _('DATE'))]
2462 2462 + commitopts2 + mergetoolopts,
2463 2463 _('[OPTION]... REVISION...'))
2464 2464 def graft(ui, repo, *revs, **opts):
2465 2465 '''copy changes from other branches onto the current branch
2466 2466
2467 2467 This command uses Mercurial's merge logic to copy individual
2468 2468 changes from other branches without merging branches in the
2469 2469 history graph. This is sometimes known as 'backporting' or
2470 2470 'cherry-picking'. By default, graft will copy user, date, and
2471 2471 description from the source changesets.
2472 2472
2473 2473 Changesets that are ancestors of the current revision, that have
2474 2474 already been grafted, or that are merges will be skipped.
2475 2475
2476 2476 If a graft merge results in conflicts, the graft process is
2477 2477 aborted so that the current merge can be manually resolved. Once
2478 2478 all conflicts are addressed, the graft process can be continued
2479 2479 with the -c/--continue option.
2480 2480
2481 2481 .. note::
2482 2482 The -c/--continue option does not reapply earlier options.
2483 2483
2484 2484 .. container:: verbose
2485 2485
2486 2486 Examples:
2487 2487
2488 2488 - copy a single change to the stable branch and edit its description::
2489 2489
2490 2490 hg update stable
2491 2491 hg graft --edit 9393
2492 2492
2493 2493 - graft a range of changesets with one exception, updating dates::
2494 2494
2495 2495 hg graft -D "2085::2093 and not 2091"
2496 2496
2497 2497 - continue a graft after resolving conflicts::
2498 2498
2499 2499 hg graft -c
2500 2500
2501 2501 - show the source of a grafted changeset::
2502 2502
2503 2503 hg log --debug -r tip
2504 2504
2505 2505 Returns 0 on successful completion.
2506 2506 '''
2507 2507
2508 2508 if not opts.get('user') and opts.get('currentuser'):
2509 2509 opts['user'] = ui.username()
2510 2510 if not opts.get('date') and opts.get('currentdate'):
2511 2511 opts['date'] = "%d %d" % util.makedate()
2512 2512
2513 2513 editor = None
2514 2514 if opts.get('edit'):
2515 2515 editor = cmdutil.commitforceeditor
2516 2516
2517 2517 cont = False
2518 2518 if opts['continue']:
2519 2519 cont = True
2520 2520 if revs:
2521 2521 raise util.Abort(_("can't specify --continue and revisions"))
2522 2522 # read in unfinished revisions
2523 2523 try:
2524 2524 nodes = repo.opener.read('graftstate').splitlines()
2525 2525 revs = [repo[node].rev() for node in nodes]
2526 2526 except IOError, inst:
2527 2527 if inst.errno != errno.ENOENT:
2528 2528 raise
2529 2529 raise util.Abort(_("no graft state found, can't continue"))
2530 2530 else:
2531 2531 cmdutil.bailifchanged(repo)
2532 2532 if not revs:
2533 2533 raise util.Abort(_('no revisions specified'))
2534 2534 revs = scmutil.revrange(repo, revs)
2535 2535
2536 2536 # check for merges
2537 2537 for ctx in repo.set('%ld and merge()', revs):
2538 2538 ui.warn(_('skipping ungraftable merge revision %s\n') % ctx.rev())
2539 2539 revs.remove(ctx.rev())
2540 2540 if not revs:
2541 2541 return -1
2542 2542
2543 2543 # check for ancestors of dest branch
2544 2544 for ctx in repo.set('::. and %ld', revs):
2545 2545 ui.warn(_('skipping ancestor revision %s\n') % ctx.rev())
2546 2546 revs.remove(ctx.rev())
2547 2547 if not revs:
2548 2548 return -1
2549 2549
2550 2550 # check ancestors for earlier grafts
2551 2551 ui.debug('scanning for existing transplants')
2552 2552 for ctx in repo.set("::. - ::%ld", revs):
2553 2553 n = ctx.extra().get('source')
2554 2554 if n and n in repo:
2555 2555 r = repo[n].rev()
2556 2556 ui.warn(_('skipping already grafted revision %s\n') % r)
2557 2557 revs.remove(r)
2558 2558 if not revs:
2559 2559 return -1
2560 2560
2561 2561 for pos, ctx in enumerate(repo.set("%ld", revs)):
2562 2562 current = repo['.']
2563 2563 ui.status('grafting revision %s', ctx.rev())
2564 2564
2565 2565 # we don't merge the first commit when continuing
2566 2566 if not cont:
2567 2567 # perform the graft merge with p1(rev) as 'ancestor'
2568 2568 try:
2569 2569 # ui.forcemerge is an internal variable, do not document
2570 2570 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2571 2571 stats = mergemod.update(repo, ctx.node(), True, True, False,
2572 2572 ctx.p1().node())
2573 2573 finally:
2574 2574 ui.setconfig('ui', 'forcemerge', '')
2575 2575 # drop the second merge parent
2576 2576 repo.dirstate.setparents(current.node(), nullid)
2577 2577 repo.dirstate.write()
2578 2578 # fix up dirstate for copies and renames
2579 2579 cmdutil.duplicatecopies(repo, ctx.rev(), current.node(), nullid)
2580 2580 # report any conflicts
2581 2581 if stats and stats[3] > 0:
2582 2582 # write out state for --continue
2583 2583 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2584 2584 repo.opener.write('graftstate', ''.join(nodelines))
2585 2585 raise util.Abort(
2586 2586 _("unresolved conflicts, can't continue"),
2587 2587 hint=_('use hg resolve and hg graft --continue'))
2588 2588 else:
2589 2589 cont = False
2590 2590
2591 2591 # commit
2592 2592 extra = {'source': ctx.hex()}
2593 2593 user = ctx.user()
2594 2594 if opts.get('user'):
2595 2595 user = opts['user']
2596 2596 date = ctx.date()
2597 2597 if opts.get('date'):
2598 2598 date = opts['date']
2599 2599 repo.commit(text=ctx.description(), user=user,
2600 2600 date=date, extra=extra, editor=editor)
2601 2601
2602 2602 # remove state when we complete successfully
2603 2603 if os.path.exists(repo.join('graftstate')):
2604 2604 util.unlinkpath(repo.join('graftstate'))
2605 2605
2606 2606 return 0
2607 2607
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
     ('', 'all', None, _('print all revisions that match')),
     ('a', 'text', None, _('treat all files as text')),
     ('f', 'follow', None,
      _('follow changeset history,'
        ' or file history across copies and renames')),
     ('i', 'ignore-case', None, _('ignore case when matching')),
     ('l', 'files-with-matches', None,
      _('print only filenames and revisions that match')),
     ('n', 'line-number', None, _('print matching line numbers')),
     ('r', 'rev', [],
      _('only search files changed within revision range'), _('REV')),
     ('u', 'user', None, _('list the author (long with -v)')),
     ('d', 'date', None, _('list the date (short with -q)')),
     ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'))
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    # with --print0, both the field separator and the line terminator
    # become NUL bytes
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # cache filelog objects; the same files recur across revisions
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp hit
        # in body; linenum is 1-based
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            # rfind/find return -1 on miss, so '+ 1 or fallback' maps a
            # miss to the fallback value
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # one matched line; equality/hash deliberately consider only the
        # line text (and line number for hashing) so SequenceMatcher can
        # diff match states between a revision and its parent
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    # matches[rev][fn] -> list of linestate; copies[rev][fn] -> copy source
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record all matching lines of fn at rev
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('-'/'+', linestate) for match lines removed from or
        # added to b relative to a (used by --all)
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        # print the matches for one file at one revision; returns True
        # if anything was printed
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [fn, str(rev)]
            before, match, after = None, None, None
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(ctx.user()))
            if opts.get('date'):
                cols.append(datefunc(ctx.date()))
            if opts.get('files_with_matches'):
                # -l: print each (file, rev) pair only once
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                before = l.line[:l.colstart]
                match = l.line[l.colstart:l.colend]
                after = l.line[l.colend:]
            ui.write(sep.join(cols))
            if before is not None:
                if not opts.get('text') and binary():
                    ui.write(sep + " Binary file matches")
                else:
                    ui.write(sep + before)
                    ui.write(match, label='grep.match')
                    ui.write(after)
            ui.write(eol)
            found = True
        return found

    # skip: files already reported (without --all); revfiles: files to
    # display per revision
    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # called by walkchangerevs for each revision before display:
        # collect match state for this revision and its first parent
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                # already reported under this name; propagate the skip
                # to the copy source as well
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # also grep the parent so --all can diff match states
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # without --all only the first matching revision of a
                    # file is shown
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # free per-revision state as soon as it has been displayed
        del matches[rev]
        del revfiles[rev]

    return not found
2816 2816
@command('heads',
    [('r', 'rev', '',
      _('show only heads which are descendants of STARTREV'), _('STARTREV')),
     ('t', 'topo', False, _('show topological heads only')),
     ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
     ('c', 'closed', False, _('show normal and closed branch heads')),
     ] + templateopts,
    _('[-ac] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository branch heads.

    Repository "heads" are changesets with no child changesets. They are
    where development generally takes place and are the usual targets
    for update and merge operations. Branch heads are changesets that have
    no child changeset on the same branch.

    If one or more REVs are given, only branch heads on the branches
    associated with the specified changesets are shown. This means
    that you can use :hg:`heads foo` to see the heads on a branch
    named ``foo``.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    changesets without children will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        # topological heads: changesets without children, branches ignored
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        # restrict to the branches of the given revisions
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        # warn about requested branches that yielded no heads
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                # fix: interpolate outside _() so the untranslated format
                # string is what gets looked up in the message catalog
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    # newest first
    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()
2889 2889
2890 2890 @command('help',
2891 2891 [('e', 'extension', None, _('show only help for extensions')),
2892 2892 ('c', 'command', None, _('show only help for commands'))],
2893 2893 _('[-ec] [TOPIC]'))
2894 2894 def help_(ui, name=None, unknowncmd=False, full=True, **opts):
2895 2895 """show help for a given topic or a help overview
2896 2896
2897 2897 With no arguments, print a list of commands with short help messages.
2898 2898
2899 2899 Given a topic, extension, or command name, print help for that
2900 2900 topic.
2901 2901
2902 2902 Returns 0 if successful.
2903 2903 """
2904 2904
2905 2905 textwidth = min(ui.termwidth(), 80) - 2
2906 2906
2907 2907 def optrst(options):
2908 2908 data = []
2909 2909 multioccur = False
2910 2910 for option in options:
2911 2911 if len(option) == 5:
2912 2912 shortopt, longopt, default, desc, optlabel = option
2913 2913 else:
2914 2914 shortopt, longopt, default, desc = option
2915 2915 optlabel = _("VALUE") # default label
2916 2916
2917 2917 if _("DEPRECATED") in desc and not ui.verbose:
2918 2918 continue
2919 2919
2920 2920 so = ''
2921 2921 if shortopt:
2922 2922 so = '-' + shortopt
2923 2923 lo = '--' + longopt
2924 2924 if default:
2925 2925 desc += _(" (default: %s)") % default
2926 2926
2927 2927 if isinstance(default, list):
2928 2928 lo += " %s [+]" % optlabel
2929 2929 multioccur = True
2930 2930 elif (default is not None) and not isinstance(default, bool):
2931 2931 lo += " %s" % optlabel
2932 2932
2933 2933 data.append((so, lo, desc))
2934 2934
2935 2935 rst = minirst.maketable(data, 1)
2936 2936
2937 2937 if multioccur:
2938 2938 rst += _("\n[+] marked option can be specified multiple times\n")
2939 2939
2940 2940 return rst
2941 2941
2942 2942 # list all option lists
2943 2943 def opttext(optlist, width):
2944 2944 rst = ''
2945 2945 if not optlist:
2946 2946 return ''
2947 2947
2948 2948 for title, options in optlist:
2949 2949 rst += '\n%s\n' % title
2950 2950 if options:
2951 2951 rst += "\n"
2952 2952 rst += optrst(options)
2953 2953 rst += '\n'
2954 2954
2955 2955 return '\n' + minirst.format(rst, width)
2956 2956
2957 2957 def addglobalopts(optlist, aliases):
2958 2958 if ui.quiet:
2959 2959 return []
2960 2960
2961 2961 if ui.verbose:
2962 2962 optlist.append((_("global options:"), globalopts))
2963 2963 if name == 'shortlist':
2964 2964 optlist.append((_('use "hg help" for the full list '
2965 2965 'of commands'), ()))
2966 2966 else:
2967 2967 if name == 'shortlist':
2968 2968 msg = _('use "hg help" for the full list of commands '
2969 2969 'or "hg -v" for details')
2970 2970 elif name and not full:
2971 2971 msg = _('use "hg help %s" to show the full help text' % name)
2972 2972 elif aliases:
2973 2973 msg = _('use "hg -v help%s" to show builtin aliases and '
2974 2974 'global options') % (name and " " + name or "")
2975 2975 else:
2976 2976 msg = _('use "hg -v help %s" to show more info') % name
2977 2977 optlist.append((msg, ()))
2978 2978
2979 2979 def helpcmd(name):
2980 2980 try:
2981 2981 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2982 2982 except error.AmbiguousCommand, inst:
2983 2983 # py3k fix: except vars can't be used outside the scope of the
2984 2984 # except block, nor can be used inside a lambda. python issue4617
2985 2985 prefix = inst.args[0]
2986 2986 select = lambda c: c.lstrip('^').startswith(prefix)
2987 2987 helplist(select)
2988 2988 return
2989 2989
2990 2990 # check if it's an invalid alias and display its error if it is
2991 2991 if getattr(entry[0], 'badalias', False):
2992 2992 if not unknowncmd:
2993 2993 entry[0](ui)
2994 2994 return
2995 2995
2996 2996 rst = ""
2997 2997
2998 2998 # synopsis
2999 2999 if len(entry) > 2:
3000 3000 if entry[2].startswith('hg'):
3001 3001 rst += "%s\n" % entry[2]
3002 3002 else:
3003 3003 rst += 'hg %s %s\n' % (aliases[0], entry[2])
3004 3004 else:
3005 3005 rst += 'hg %s\n' % aliases[0]
3006 3006
3007 3007 # aliases
3008 3008 if full and not ui.quiet and len(aliases) > 1:
3009 3009 rst += _("\naliases: %s\n") % ', '.join(aliases[1:])
3010 3010
3011 3011 # description
3012 3012 doc = gettext(entry[0].__doc__)
3013 3013 if not doc:
3014 3014 doc = _("(no help text available)")
3015 3015 if util.safehasattr(entry[0], 'definition'): # aliased command
3016 3016 if entry[0].definition.startswith('!'): # shell alias
3017 3017 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
3018 3018 else:
3019 3019 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
3020 3020 if ui.quiet or not full:
3021 3021 doc = doc.splitlines()[0]
3022 3022 rst += "\n" + doc + "\n"
3023 3023
3024 3024 # check if this command shadows a non-trivial (multi-line)
3025 3025 # extension help text
3026 3026 try:
3027 3027 mod = extensions.find(name)
3028 3028 doc = gettext(mod.__doc__) or ''
3029 3029 if '\n' in doc.strip():
3030 3030 msg = _('use "hg help -e %s" to show help for '
3031 3031 'the %s extension') % (name, name)
3032 3032 rst += '\n%s\n' % msg
3033 3033 except KeyError:
3034 3034 pass
3035 3035
3036 3036 # options
3037 3037 if not ui.quiet and entry[1]:
3038 3038 rst += '\noptions:\n\n'
3039 3039 rst += optrst(entry[1])
3040 3040
3041 3041 if ui.verbose:
3042 3042 rst += '\nglobal options:\n\n'
3043 3043 rst += optrst(globalopts)
3044 3044
3045 3045 keep = ui.verbose and ['verbose'] or []
3046 3046 formatted, pruned = minirst.format(rst, textwidth, keep=keep)
3047 3047 ui.write(formatted)
3048 3048
3049 3049 if not ui.verbose:
3050 3050 if not full:
3051 3051 ui.write(_('\nuse "hg help %s" to show the full help text\n')
3052 3052 % name)
3053 3053 elif not ui.quiet:
3054 3054 ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)
3055 3055
3056 3056
    def helplist(select=None):
        # Print a one-line-per-command listing, optionally filtered by
        # the predicate ``select(commandname) -> bool``.  Reads ``name``,
        # ``table``, ``textwidth`` and ``ui`` from the enclosing scope.
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        h = {}     # command name -> first line of its help text
        cmds = {}  # command name -> full alias spec (without '^')
        for c, e in table.iteritems():
            # primary name is the first alias in the '|'-separated key
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            # with no filter, the full list shows only core commands
            # (those defined in this module)
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            # the short list shows only commands marked with a '^' prefix
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            # debug* commands are hidden unless --debug is in effect
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            # deprecated commands are hidden unless --verbose is in effect
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        # column width for aligning summaries: longest command name
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                # wrap the summary so continuation lines align past the name
                ui.write('%s\n' % (util.wrap(h[f], textwidth,
                                             initindent=' %-*s ' % (m, f),
                                             hangindent=' ' * (m + 4))))

        if not name:
            # top-level "hg help": also list enabled extensions, help
            # topics and the global options
            text = help.listexts(_('enabled extensions:'), extensions.enabled())
            if text:
                ui.write("\n%s" % minirst.format(text, textwidth))

            ui.write(_("\nadditional help topics:\n\n"))
            topics = []
            for names, header, doc in help.helptable:
                # use the longest alias as the displayed topic name
                topics.append((sorted(names, key=len, reverse=True)[0], header))
            topics_len = max([len(s[0]) for s in topics])
            for t, desc in topics:
                ui.write(" %-*s %s\n" % (topics_len, t, desc))

            optlist = []
            addglobalopts(optlist, True)
            ui.write(opttext(optlist, textwidth))
3119 3119
3120 3120 def helptopic(name):
3121 3121 for names, header, doc in help.helptable:
3122 3122 if name in names:
3123 3123 break
3124 3124 else:
3125 3125 raise error.UnknownCommand(name)
3126 3126
3127 3127 # description
3128 3128 if not doc:
3129 3129 doc = _("(no help text available)")
3130 3130 if util.safehasattr(doc, '__call__'):
3131 3131 doc = doc()
3132 3132
3133 3133 ui.write("%s\n\n" % header)
3134 3134 ui.write("%s" % minirst.format(doc, textwidth, indent=4))
3135 3135 try:
3136 3136 cmdutil.findcmd(name, table)
3137 3137 ui.write(_('\nuse "hg help -c %s" to see help for '
3138 3138 'the %s command\n') % (name, name))
3139 3139 except error.UnknownCommand:
3140 3140 pass
3141 3141
    def helpext(name):
        # Display help for the extension ``name`` (enabled or disabled),
        # followed by a listing of the commands it provides when enabled.
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or _('no help text available')
        except KeyError:
            # not enabled: fall back to the docstring of the disabled
            # extension, if one can be located
            mod = None
            doc = extensions.disabledext(name)
            if not doc:
                raise error.UnknownCommand(name)

        # split the docstring into a one-line summary and the remainder
        if '\n' not in doc:
            head, tail = doc, ""
        else:
            head, tail = doc.split('\n', 1)
        ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
        if tail:
            ui.write(minirst.format(tail, textwidth))
            ui.status('\n')

        if mod:
            # list only the commands this extension's cmdtable defines
            try:
                ct = mod.cmdtable
            except AttributeError:
                ct = {}
            modcmds = set([c.split('|', 1)[0] for c in ct])
            helplist(modcmds.__contains__)
        else:
            ui.write(_('use "hg help extensions" for information on enabling '
                       'extensions\n'))
3171 3171
3172 3172 def helpextcmd(name):
3173 3173 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
3174 3174 doc = gettext(mod.__doc__).splitlines()[0]
3175 3175
3176 3176 msg = help.listexts(_("'%s' is provided by the following "
3177 3177 "extension:") % cmd, {ext: doc}, indent=4)
3178 3178 ui.write(minirst.format(msg, textwidth))
3179 3179 ui.write('\n')
3180 3180 ui.write(_('use "hg help extensions" for information on enabling '
3181 3181 'extensions\n'))
3182 3182
3183 3183 if name and name != 'shortlist':
3184 3184 i = None
3185 3185 if unknowncmd:
3186 3186 queries = (helpextcmd,)
3187 3187 elif opts.get('extension'):
3188 3188 queries = (helpext,)
3189 3189 elif opts.get('command'):
3190 3190 queries = (helpcmd,)
3191 3191 else:
3192 3192 queries = (helptopic, helpcmd, helpext, helpextcmd)
3193 3193 for f in queries:
3194 3194 try:
3195 3195 f(name)
3196 3196 i = None
3197 3197 break
3198 3198 except error.UnknownCommand, inst:
3199 3199 i = inst
3200 3200 if i:
3201 3201 raise i
3202 3202 else:
3203 3203 # program name
3204 3204 ui.status(_("Mercurial Distributed SCM\n"))
3205 3205 ui.status('\n')
3206 3206 helplist()
3207 3207
3208 3208
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks'))],
    _('[-nibtB] [-r REV] [SOURCE]'))
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None):
    """identify the working copy or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    # full 40-char hashes with --debug, short 12-char hashes otherwise
    # (py2 and/or idiom; safe since hex is always truthy)
    hexfunc = ui.debugflag and hex or short
    # with no explicit flags, show the default id/branch/tags/bookmarks mix
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        # operate on the given repository/bundle instead of the local one
        source, branches = hg.parseurl(ui.expandpath(source))
        repo = hg.peer(ui, {}, source)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)

    if not repo.local():
        # remote peers can only answer id and bookmark queries
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = repo.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # bookmarks on the remote that point at remoterev
            bms = []

            if 'bookmarks' in repo.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return bms

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            # no revision given: describe the working directory state
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                # "+" suffix marks uncommitted changes
                changed = util.any(repo.status()) and "+" or ""
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            # default output hides the branch name when it is 'default'
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
3341 3341
@command('import|patch',
    [('p', 'strip', 1,
     _('directory strip option for patch. This has the same '
       'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'exact', None,
     _('apply patch to the nodes from which it was generated')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    If there are outstanding changes in the working directory, import
    will abort unless given the -f/--force flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    Use --bypass to apply and commit patches directly to the
    repository, not touching the working directory. Without --exact,
    patches will be applied on top of the working directory parent
    revision.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as 'addremove'.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

    Returns 0 on success.
    """

    # ---- option validation -------------------------------------------
    if not patch1:
        raise util.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    editor = cmdutil.commiteditor
    if opts.get('edit'):
        editor = cmdutil.commitforceeditor

    # update == True: patch the working directory; False: --bypass mode
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise util.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise util.Abort(_('cannot use --similarity with --bypass'))

    # refuse to touch a dirty working directory (unless --force,
    # which is implicitly overridden by --exact)
    if (opts.get('exact') or not opts.get('force')) and update:
        cmdutil.bailifchanged(repo)

    base = opts["base"]
    strip = opts["strip"]
    wlock = lock = tr = None
    msgs = []  # commit messages accumulated under --no-commit

    def checkexact(repo, n, nodeid):
        # --exact: the committed node must reproduce the id recorded in
        # the patch; otherwise undo the commit and abort
        if opts.get('exact') and hex(n) != nodeid:
            repo.rollback()
            raise util.Abort(_('patch is damaged or loses information'))

    def tryone(ui, hunk, parents):
        # Apply a single patch hunk; returns (statusmsg, committed node)
        # or (None, None) when the hunk contained no patch data.
        tmpname, message, user, date, branch, nodeid, p1, p2 = \
            patch.extract(ui, hunk)

        if not tmpname:
            return (None, None)
        msg = _('applied to working directory')

        try:
            # commit message priority: -m/--message, then the message
            # embedded in the patch, then the editor (message is None)
            cmdline_message = cmdutil.logmessage(ui, opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug('message:\n%s\n' % message)

            if len(parents) == 1:
                parents.append(repo[nullid])
            # resolve the parents to commit against, preferring the ones
            # recorded in the patch when --exact or when both are usable
            if opts.get('exact'):
                if not nodeid or not p1:
                    raise util.Abort(_('not a Mercurial patch'))
                p1 = repo[p1]
                p2 = repo[p2 or nullid]
            elif p2:
                try:
                    p1 = repo[p1]
                    p2 = repo[p2]
                except error.RepoError:
                    p1, p2 = parents
            else:
                p1, p2 = parents

            n = None
            if update:
                # working-directory mode: position the wd, apply, commit
                if opts.get('exact') and p1 != parents[0]:
                    hg.clean(repo, p1.node())
                if p1 != parents[0] and p2 != parents[1]:
                    repo.dirstate.setparents(p1.node(), p2.node())

                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = set()
                patch.patch(ui, repo, tmpname, strip=strip, files=files,
                            eolmode=None, similarity=sim / 100.0)
                files = list(files)
                if opts.get('no_commit'):
                    if message:
                        msgs.append(message)
                else:
                    if opts.get('exact'):
                        m = None
                    else:
                        m = scmutil.matchfiles(repo, files or [])
                    n = repo.commit(message, opts.get('user') or user,
                                    opts.get('date') or date, match=m,
                                    editor=editor)
                    checkexact(repo, n, nodeid)
            else:
                # --bypass mode: build an in-memory commit without
                # touching the working directory
                if opts.get('exact') or opts.get('import_branch'):
                    branch = branch or 'default'
                else:
                    branch = p1.branch()
                store = patch.filestore()
                try:
                    files = set()
                    try:
                        patch.patchrepo(ui, repo, p1, store, tmpname, strip,
                                        files, eolmode=None)
                    except patch.PatchError, e:
                        raise util.Abort(str(e))
                    memctx = patch.makememctx(repo, (p1.node(), p2.node()),
                                              message,
                                              opts.get('user') or user,
                                              opts.get('date') or date,
                                              branch, files, store,
                                              editor=cmdutil.commiteditor)
                    repo.savecommitmessage(memctx.description())
                    n = memctx.commit()
                    checkexact(repo, n, nodeid)
                finally:
                    store.close()
            if n:
                # i18n: refers to a short changeset id
                msg = _('created %s') % short(n)
            return (msg, n)
        finally:
            # patch.extract wrote the hunk to a temp file; always clean up
            os.unlink(tmpname)

    try:
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            # one transaction covers all patches: all-or-nothing import
            tr = repo.transaction('import')
            parents = repo.parents()
            for patchurl in patches:
                if patchurl == '-':
                    ui.status(_('applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = 'stdin'      # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_('applying %s\n') % patchurl)
                    patchfile = url.open(ui, patchurl)

                haspatch = False
                for hunk in patch.split(patchfile):
                    (msg, node) = tryone(ui, hunk, parents)
                    if msg:
                        haspatch = True
                        ui.note(msg + '\n')
                    # subsequent hunks apply on top of what was just
                    # committed (or the new wd parents in update mode)
                    if update or opts.get('exact'):
                        parents = repo.parents()
                    else:
                        parents = [repo[node]]

                if not haspatch:
                    raise util.Abort(_('%s: no diffs found') % patchurl)

            tr.close()
            if msgs:
                repo.savecommitmessage('\n* * *\n'.join(msgs))
        except:
            # wlock.release() indirectly calls dirstate.write(): since
            # we're crashing, we do not want to change the working dir
            # parent after all, so make sure it writes nothing
            repo.dirstate.invalidate()
            raise
        finally:
            if tr:
                tr.release()
            release(lock, wlock)
3597 3597
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    For remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        # -B/--bookmarks: compare bookmarks instead of changesets
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.diff(ui, repo, other)

    # expose the expanded source while hg.incoming runs — consumed
    # elsewhere (presumably by subrepo code; confirm against callers)
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
3642 3642
3643 3643
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # create=True makes hg.peer create the repository (possibly remotely,
    # e.g. over ssh) instead of connecting to an existing one
    hg.peer(ui, opts, ui.expandpath(dest), create=True)
3659 3659
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    Returns 0 if a match is found, 1 otherwise.
    """
    # line terminator: NUL with -0/--print0, newline otherwise
    end = opts.get('print0') and '\0' or '\n'
    # rev is None for the working directory revision
    rev = scmutil.revsingle(repo, opts.get('rev'), None).node()

    ret = 1
    m = scmutil.match(repo[rev], pats, opts, default='relglob')
    # silence "no such file" complaints for non-matching patterns
    m.bad = lambda x, y: False
    for abs in repo[rev].walk(m):
        # in the working directory, list only tracked files
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret
3702 3702
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ('', 'hidden', False, _('show hidden changesets')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'))
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    .. note::
       log -p/--patch may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::
       for performance reasons, log FILE may omit duplicate changes
       made on branches and will not show deletions. To see all
       changes including duplicates and deletions, use the --removed
       switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - check if a given changeset is included is a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on success.
    """

    matchfn = scmutil.match(repo[None], pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    # with -C and -r, stop rename detection past the newest listed rev
    endrev = None
    if opts.get('copies') and opts.get('rev'):
        endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1

    # df becomes a date-matching predicate when -d/--date is given
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    # merge deprecated --only-branch into --branch, normalizing names
    branches = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in branches]

    displayer = cmdutil.show_changeset(ui, repo, opts, True)
    def prep(ctx, fns):
        # Per-changeset filter/preparation callback for walkchangerevs:
        # returning early (without calling displayer.show) skips the
        # changeset.
        rev = ctx.rev()
        parents = [p for p in repo.changelog.parentrevs(rev)
                   if p != nullrev]
        # a merge is exactly a changeset with two non-null parents
        if opts.get('no_merges') and len(parents) == 2:
            return
        if opts.get('only_merges') and len(parents) != 2:
            return
        if opts.get('branch') and ctx.branch() not in opts['branch']:
            return
        if not opts.get('hidden') and ctx.hidden():
            return
        if df and not df(ctx.date()[0]):
            return
        # -u/--user: case-insensitive substring match against any value
        if opts['user'] and not [k for k in opts['user']
                                 if k.lower() in ctx.user().lower()]:
            return
        # -k/--keyword: every keyword must hit user, description or files
        if opts.get('keyword'):
            for k in [kw.lower() for kw in opts['keyword']]:
                if (k in ctx.user().lower() or
                    k in ctx.description().lower() or
                    k in " ".join(ctx.files()).lower()):
                    break
            else:
                return

        copies = None
        if opts.get('copies') and rev:
            copies = []
            getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))

        # match function used to restrict -p/--patch and --stat output
        revmatchfn = None
        if opts.get('patch') or opts.get('stat'):
            if opts.get('follow') or opts.get('follow_first'):
                # note: this might be wrong when following through merges
                revmatchfn = scmutil.match(repo[None], fns, default='path')
            else:
                revmatchfn = matchfn

        displayer.show(ctx, copies=copies, matchfn=revmatchfn)

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        if count == limit:
            break
        # flush returns whether the changeset was actually displayed;
        # only displayed changesets count toward -l/--limit
        if displayer.flush(ctx.rev()):
            count += 1
    displayer.close()
3873 3873
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))],
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    if opts.get('all'):
        if rev or node:
            raise util.Abort(_("can't specify a revision with --all"))

        # --all: derive filenames from the store's data files, which
        # are named "data/<path>.i"; this covers every file that ever
        # existed, including deleted and renamed ones
        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        lock = repo.lock()
        try:
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        finally:
            lock.release()
        for f in sorted(res):
            ui.write("%s\n" % f)
        return

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # verbose prefix per file flag: symlink, executable, or plain
    decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = scmutil.revsingle(repo, node)
    for f in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)
3927 3927
@command('^merge',
    [('f', 'force', None, _('force a merge with outstanding changes')),
     ('r', 'rev', '', _('revision to merge'), _('REV')),
     ('P', 'preview', None,
      _('review revisions to merge (no merge is performed)'))
     ] + mergetoolopts,
    _('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    # the revision may come either positionally or via -r, but not both
    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if not node:
        # no explicit revision: try to pick the "other" head of the
        # current branch, but only when that choice is unambiguous
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev")
                             % (branch, len(bheads)),
                             hint=_("run 'hg heads .' to see heads"))

        parent = repo.dirstate.p1()
        if len(bheads) == 1:
            # a single branch head means there is nothing on this branch
            # to merge with; distinguish "update needed" from "nothing"
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev")
                                 % branch,
                                 hint=_("run 'hg heads' to see all heads"))
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                # working dir parent is not the branch tip
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working directory not at a head revision'),
                             hint=_("use 'hg update' or merge with an "
                                    "explicit revision"))
        # exactly two heads here: merge with whichever one is not the
        # working directory parent (pre-ternary and/or idiom)
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    else:
        node = scmutil.revsingle(repo, node).node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        # NOTE(review): forcemerge is set on repo.ui but cleared on ui —
        # confirm both names refer to the same ui object in this context
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        ui.setconfig('ui', 'forcemerge', '')
4016 4016
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'), _('REV')),
     ('n', 'newest-first', None, _('show newest record first')),
     ('B', 'bookmarks', False, _('compare bookmarks')),
     ('b', 'branch', [], _('a specific branch you would like to push'),
      _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """

    if opts.get('bookmarks'):
        # -B: compare only bookmarks, not changesets
        url = ui.expandpath(dest or 'default-push', dest or 'default')
        url, branches = hg.parseurl(url, opts.get('branch'))
        remote = hg.peer(repo, opts, url)
        if 'bookmarks' not in remote.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(url))
        return bookmarks.diff(ui, remote, repo)

    # expose the expanded destination to subrepos while delegating the
    # actual work to hg.outgoing
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
4054 4054
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'))
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        # a file argument: resolve exactly one explicit filename
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # collect the file's node in each parent that actually has it
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # file does not exist in this parent; skip it
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # map each file node back to the changeset that introduced it
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # the null parent of a root changeset is not displayed
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
4098 4098
@command('paths', [], _('[NAME]'))
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    if search:
        # look up a single alias; exit code 1 if it is not defined
        for alias, location in ui.configitems("paths"):
            if alias == search:
                ui.status("%s\n" % util.hidepassword(location))
                return
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1

    # no name given: list every definition (names only with -q)
    for alias, location in ui.configitems("paths"):
        if ui.quiet:
            ui.write("%s\n" % alias)
        else:
            ui.write("%s = %s\n" % (alias, util.hidepassword(location)))
4142 4142
4143 4143 def postincoming(ui, repo, modheads, optupdate, checkout):
4144 4144 if modheads == 0:
4145 4145 return
4146 4146 if optupdate:
4147 4147 try:
4148 4148 return hg.update(repo, checkout)
4149 4149 except util.Abort, inst:
4150 4150 ui.warn(_("not updating: %s\n" % str(inst)))
4151 4151 return 0
4152 4152 if modheads > 1:
4153 4153 currentbranchheads = len(repo.branchheads())
4154 4154 if currentbranchheads == modheads:
4155 4155 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4156 4156 elif currentbranchheads > 1:
4157 4157 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
4158 4158 else:
4159 4159 ui.status(_("(run 'hg heads' to see heads)\n"))
4160 4160 else:
4161 4161 ui.status(_("(run 'hg update' to get a working copy)\n"))
4162 4162
@command('^pull',
    [('u', 'update', None,
      _('update to new branch head if changesets were pulled')),
     ('f', 'force', None, _('run even when remote repository is unrelated')),
     ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
     ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
     ('b', 'branch', [], _('a specific branch you would like to pull'),
      _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))

    if opts.get('bookmark'):
        # translate each -B bookmark into the remote revision it points
        # at, so the bookmarked changesets are included in the pull.
        # rb (remote bookmarks) is reused after the pull, below.
        if not revs:
            revs = []
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            if b not in rb:
                raise util.Abort(_('remote bookmark %s not found!') % b)
            revs.append(rb[b])

    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    bookmarks.updatefromremote(ui, repo, other)
    if checkout:
        # translate the branch checkout target to a local rev number
        checkout = str(repo.changelog.rev(other.lookup(checkout)))
    # expose the source path to subrepos during the post-pull update
    repo._subtoppath = source
    try:
        ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

    finally:
        del repo._subtoppath

    # update specified bookmarks
    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # explicit pull overrides local bookmark if any
            ui.status(_("importing bookmark %s\n") % b)
            repo._bookmarks[b] = repo[rb[b]].node()
        bookmarks.write(repo)

    return ret
4235 4235
@command('^push',
    [('f', 'force', None, _('force push')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'),
      _('REV')),
     ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
     ('b', 'branch', [],
      _('a specific branch you would like to push'), _('BRANCH')),
     ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    Use -f/--force to override the default behavior and push all
    changesets on all branches.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if not c.sub(s).push(opts.get('force')):
                # a failed subrepo push aborts the whole operation
                return False
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    # repo.push returns 0 on failure; invert to a boolean "success"
    # value that doubles as the exit-code accumulator below
    result = (result == 0)

    if opts.get('bookmark'):
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            # explicit push overrides remote bookmark if any
            if b in repo._bookmarks:
                ui.status(_("exporting bookmark %s\n") % b)
                new = repo[b].hex()
            elif b in rb:
                # bookmark exists only remotely: pushing it deletes it
                ui.status(_("deleting remote bookmark %s\n") % b)
                new = '' # delete
            else:
                ui.warn(_('bookmark %s does not exist on the local '
                          'or remote repository!\n') % b)
                return 2
            old = rb.get(b, '')
            r = other.pushkey('bookmarks', b, old, new)
            if not r:
                ui.warn(_('updating bookmark %s failed!\n') % b)
                if not result:
                    result = 2

    return result
4332 4332
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # nothing to recover: report failure without running verify
    if not repo.recover():
        return 1
    return hg.verify(repo)
4348 4348
4349 4349 @command('^remove|rm',
4350 4350 [('A', 'after', None, _('record delete for missing files')),
4351 4351 ('f', 'force', None,
4352 4352 _('remove (and delete) file even if added or modified')),
4353 4353 ] + walkopts,
4354 4354 _('[OPTION]... FILE...'))
4355 4355 def remove(ui, repo, *pats, **opts):
4356 4356 """remove the specified files on the next commit
4357 4357
4358 4358 Schedule the indicated files for removal from the current branch.
4359 4359
4360 4360 This command schedules the files to be removed at the next commit.
4361 4361 To undo a remove before that, see :hg:`revert`. To undo added
4362 4362 files, see :hg:`forget`.
4363 4363
4364 4364 .. container:: verbose
4365 4365
4366 4366 -A/--after can be used to remove only files that have already
4367 4367 been deleted, -f/--force can be used to force deletion, and -Af
4368 4368 can be used to remove files from the next revision without
4369 4369 deleting them from the working directory.
4370 4370
4371 4371 The following table details the behavior of remove for different
4372 4372 file states (columns) and option combinations (rows). The file
4373 4373 states are Added [A], Clean [C], Modified [M] and Missing [!]
4374 4374 (as reported by :hg:`status`). The actions are Warn, Remove
4375 4375 (from branch) and Delete (from disk):
4376 4376
4377 4377 ======= == == == ==
4378 4378 A C M !
4379 4379 ======= == == == ==
4380 4380 none W RD W R
4381 4381 -f R RD RD R
4382 4382 -A W W W R
4383 4383 -Af R R R R
4384 4384 ======= == == == ==
4385 4385
4386 4386 Note that remove never deletes files in Added [A] state from the
4387 4387 working directory, not even if option --force is specified.
4388 4388
4389 4389 Returns 0 on success, 1 if any warnings encountered.
4390 4390 """
4391 4391
4392 4392 ret = 0
4393 4393 after, force = opts.get('after'), opts.get('force')
4394 4394 if not pats and not after:
4395 4395 raise util.Abort(_('no files specified'))
4396 4396
4397 4397 m = scmutil.match(repo[None], pats, opts)
4398 4398 s = repo.status(match=m, clean=True)
4399 4399 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4400 4400
4401 4401 for f in m.files():
4402 4402 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
4403 4403 if os.path.exists(m.rel(f)):
4404 4404 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4405 4405 ret = 1
4406 4406
4407 4407 if force:
4408 4408 list = modified + deleted + clean + added
4409 4409 elif after:
4410 4410 list = deleted
4411 4411 for f in modified + added + clean:
4412 4412 ui.warn(_('not removing %s: file still exists (use -f'
4413 4413 ' to force removal)\n') % m.rel(f))
4414 4414 ret = 1
4415 4415 else:
4416 4416 list = deleted + clean
4417 4417 for f in modified:
4418 4418 ui.warn(_('not removing %s: file is modified (use -f'
4419 4419 ' to force removal)\n') % m.rel(f))
4420 4420 ret = 1
4421 4421 for f in added:
4422 4422 ui.warn(_('not removing %s: file has been marked for add'
4423 4423 ' (use forget to undo)\n') % m.rel(f))
4424 4424 ret = 1
4425 4425
4426 4426 for f in sorted(list):
4427 4427 if ui.verbose or not m.exact(f):
4428 4428 ui.status(_('removing %s\n') % m.rel(f))
4429 4429
4430 4430 wlock = repo.wlock()
4431 4431 try:
4432 4432 if not after:
4433 4433 for f in list:
4434 4434 if f in added:
4435 4435 continue # we never unlink added files on remove
4436 4436 try:
4437 4437 util.unlinkpath(repo.wjoin(f))
4438 4438 except OSError, inst:
4439 4439 if inst.errno != errno.ENOENT:
4440 4440 raise
4441 4441 repo[None].forget(list)
4442 4442 finally:
4443 4443 wlock.release()
4444 4444
4445 4445 return ret
4446 4446
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
     ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    wlock = repo.wlock(False)
    try:
        # rename is implemented as copy-with-deletion in cmdutil
        result = cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
    return result
4473 4473
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
     ('l', 'list', None, _('list state of files needing merge')),
     ('m', 'mark', None, _('mark files as resolved')),
     ('u', 'unmark', None, _('mark files as unresolved')),
     ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'))
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents).

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # NOTE(review): 'all' shadows the builtin of the same name for the
    # rest of this function
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    # -l, -m and -u are mutually exclusive modes
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = mergemod.mergestate(repo)
    m = scmutil.match(repo[None], pats, opts)
    ret = 0

    # iterate over every file recorded in the merge state and apply the
    # selected mode to those matching the patterns
    for f in ms:
        if m(f):
            if show:
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    # status letter (U/R) plus a label for color output
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # default mode: actually re-run the merge for this file
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                    if ms.resolve(f, wctx, mctx):
                        # resolve attempt failed; report exit code 1
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '')

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

    ms.commit()
    return ret
4568 4568
4569 4569 @command('revert',
4570 4570 [('a', 'all', None, _('revert all changes when no arguments given')),
4571 4571 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4572 4572 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4573 4573 ('C', 'no-backup', None, _('do not save backup copies of files')),
4574 4574 ] + walkopts + dryrunopts,
4575 4575 _('[OPTION]... [-r REV] [NAME]...'))
4576 4576 def revert(ui, repo, *pats, **opts):
4577 4577 """restore files to their checkout state
4578 4578
4579 4579 .. note::
4580 4580 To check out earlier revisions, you should use :hg:`update REV`.
4581 4581 To cancel a merge (and lose your changes), use :hg:`update --clean .`.
4582 4582
4583 4583 With no revision specified, revert the specified files or directories
4584 4584 to the contents they had in the parent of the working directory.
4585 4585 This restores the contents of files to an unmodified
4586 4586 state and unschedules adds, removes, copies, and renames. If the
4587 4587 working directory has two parents, you must explicitly specify a
4588 4588 revision.
4589 4589
4590 4590 Using the -r/--rev or -d/--date options, revert the given files or
4591 4591 directories to their states as of a specific revision. Because
4592 4592 revert does not change the working directory parents, this will
4593 4593 cause these files to appear modified. This can be helpful to "back
4594 4594 out" some or all of an earlier change. See :hg:`backout` for a
4595 4595 related method.
4596 4596
4597 4597 Modified files are saved with a .orig suffix before reverting.
4598 4598 To disable these backups, use --no-backup.
4599 4599
4600 4600 See :hg:`help dates` for a list of formats valid for -d/--date.
4601 4601
4602 4602 Returns 0 on success.
4603 4603 """
4604 4604
4605 4605 if opts.get("date"):
4606 4606 if opts.get("rev"):
4607 4607 raise util.Abort(_("you can't specify a revision and a date"))
4608 4608 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4609 4609
4610 4610 parent, p2 = repo.dirstate.parents()
4611 4611 if not opts.get('rev') and p2 != nullid:
4612 4612 # revert after merge is a trap for new users (issue2915)
4613 4613 raise util.Abort(_('uncommitted merge with no revision specified'),
4614 4614 hint=_('use "hg update" or see "hg help revert"'))
4615 4615
4616 4616 ctx = scmutil.revsingle(repo, opts.get('rev'))
4617 4617 node = ctx.node()
4618 4618
4619 4619 if not pats and not opts.get('all'):
4620 4620 msg = _("no files or directories specified")
4621 4621 if p2 != nullid:
4622 4622 hint = _("uncommitted merge, use --all to discard all changes,"
4623 4623 " or 'hg update -C .' to abort the merge")
4624 4624 raise util.Abort(msg, hint=hint)
4625 4625 dirty = util.any(repo.status())
4626 4626 if node != parent:
4627 4627 if dirty:
4628 4628 hint = _("uncommitted changes, use --all to discard all"
4629 4629 " changes, or 'hg update %s' to update") % ctx.rev()
4630 4630 else:
4631 4631 hint = _("use --all to revert all files,"
4632 4632 " or 'hg update %s' to update") % ctx.rev()
4633 4633 elif dirty:
4634 4634 hint = _("uncommitted changes, use --all to discard all changes")
4635 4635 else:
4636 4636 hint = _("use --all to revert all files")
4637 4637 raise util.Abort(msg, hint=hint)
4638 4638
4639 4639 mf = ctx.manifest()
4640 4640 if node == parent:
4641 4641 pmf = mf
4642 4642 else:
4643 4643 pmf = None
4644 4644
4645 4645 # need all matching names in dirstate and manifest of target rev,
4646 4646 # so have to walk both. do not print errors if files exist in one
4647 4647 # but not other.
4648 4648
4649 4649 names = {}
4650 4650
4651 4651 wlock = repo.wlock()
4652 4652 try:
4653 4653 # walk dirstate.
4654 4654
4655 4655 m = scmutil.match(repo[None], pats, opts)
4656 4656 m.bad = lambda x, y: False
4657 4657 for abs in repo.walk(m):
4658 4658 names[abs] = m.rel(abs), m.exact(abs)
4659 4659
4660 4660 # walk target manifest.
4661 4661
4662 4662 def badfn(path, msg):
4663 4663 if path in names:
4664 4664 return
4665 4665 if path in repo[node].substate:
4666 4666 ui.warn("%s: %s\n" % (m.rel(path),
4667 4667 'reverting subrepos is unsupported'))
4668 4668 return
4669 4669 path_ = path + '/'
4670 4670 for f in names:
4671 4671 if f.startswith(path_):
4672 4672 return
4673 4673 ui.warn("%s: %s\n" % (m.rel(path), msg))
4674 4674
4675 4675 m = scmutil.match(repo[node], pats, opts)
4676 4676 m.bad = badfn
4677 4677 for abs in repo[node].walk(m):
4678 4678 if abs not in names:
4679 4679 names[abs] = m.rel(abs), m.exact(abs)
4680 4680
4681 4681 m = scmutil.matchfiles(repo, names)
4682 4682 changes = repo.status(match=m)[:4]
4683 4683 modified, added, removed, deleted = map(set, changes)
4684 4684
4685 4685 # if f is a rename, also revert the source
4686 4686 cwd = repo.getcwd()
4687 4687 for f in added:
4688 4688 src = repo.dirstate.copied(f)
4689 4689 if src and src not in names and repo.dirstate[src] == 'r':
4690 4690 removed.add(src)
4691 4691 names[src] = (repo.pathto(src, cwd), True)
4692 4692
4693 4693 def removeforget(abs):
4694 4694 if repo.dirstate[abs] == 'a':
4695 4695 return _('forgetting %s\n')
4696 4696 return _('removing %s\n')
4697 4697
4698 4698 revert = ([], _('reverting %s\n'))
4699 4699 add = ([], _('adding %s\n'))
4700 4700 remove = ([], removeforget)
4701 4701 undelete = ([], _('undeleting %s\n'))
4702 4702
4703 4703 disptable = (
4704 4704 # dispatch table:
4705 4705 # file state
4706 4706 # action if in target manifest
4707 4707 # action if not in target manifest
4708 4708 # make backup if in target manifest
4709 4709 # make backup if not in target manifest
4710 4710 (modified, revert, remove, True, True),
4711 4711 (added, revert, remove, True, False),
4712 4712 (removed, undelete, None, False, False),
4713 4713 (deleted, revert, remove, False, False),
4714 4714 )
4715 4715
4716 4716 for abs, (rel, exact) in sorted(names.items()):
4717 4717 mfentry = mf.get(abs)
4718 4718 target = repo.wjoin(abs)
4719 4719 def handle(xlist, dobackup):
4720 4720 xlist[0].append(abs)
4721 4721 if (dobackup and not opts.get('no_backup') and
4722 4722 os.path.lexists(target)):
4723 4723 bakname = "%s.orig" % rel
4724 4724 ui.note(_('saving current version of %s as %s\n') %
4725 4725 (rel, bakname))
4726 4726 if not opts.get('dry_run'):
4727 4727 util.rename(target, bakname)
4728 4728 if ui.verbose or not exact:
4729 4729 msg = xlist[1]
4730 4730 if not isinstance(msg, basestring):
4731 4731 msg = msg(abs)
4732 4732 ui.status(msg % rel)
4733 4733 for table, hitlist, misslist, backuphit, backupmiss in disptable:
4734 4734 if abs not in table:
4735 4735 continue
4736 4736 # file has changed in dirstate
4737 4737 if mfentry:
4738 4738 handle(hitlist, backuphit)
4739 4739 elif misslist is not None:
4740 4740 handle(misslist, backupmiss)
4741 4741 break
4742 4742 else:
4743 4743 if abs not in repo.dirstate:
4744 4744 if mfentry:
4745 4745 handle(add, True)
4746 4746 elif exact:
4747 4747 ui.warn(_('file not managed: %s\n') % rel)
4748 4748 continue
4749 4749 # file has not changed in dirstate
4750 4750 if node == parent:
4751 4751 if exact:
4752 4752 ui.warn(_('no changes needed to %s\n') % rel)
4753 4753 continue
4754 4754 if pmf is None:
4755 4755 # only need parent manifest in this unlikely case,
4756 4756 # so do not read by default
4757 4757 pmf = repo[parent].manifest()
4758 4758 if abs in pmf:
4759 4759 if mfentry:
4760 4760 # if version of file is same in parent and target
4761 4761 # manifests, do nothing
4762 4762 if (pmf[abs] != mfentry or
4763 4763 pmf.flags(abs) != mf.flags(abs)):
4764 4764 handle(revert, False)
4765 4765 else:
4766 4766 handle(remove, False)
4767 4767
4768 4768 if not opts.get('dry_run'):
4769 4769 def checkout(f):
4770 4770 fc = ctx[f]
4771 4771 repo.wwrite(f, fc.data(), fc.flags())
4772 4772
4773 4773 audit_path = scmutil.pathauditor(repo.root)
4774 4774 for f in remove[0]:
4775 4775 if repo.dirstate[f] == 'a':
4776 4776 repo.dirstate.drop(f)
4777 4777 continue
4778 4778 audit_path(f)
4779 4779 try:
4780 4780 util.unlinkpath(repo.wjoin(f))
4781 4781 except OSError:
4782 4782 pass
4783 4783 repo.dirstate.remove(f)
4784 4784
4785 4785 normal = None
4786 4786 if node == parent:
4787 4787 # We're reverting to our parent. If possible, we'd like status
4788 4788 # to report the file as clean. We have to use normallookup for
4789 4789 # merges to avoid losing information about merged/dirty files.
4790 4790 if p2 != nullid:
4791 4791 normal = repo.dirstate.normallookup
4792 4792 else:
4793 4793 normal = repo.dirstate.normal
4794 4794 for f in revert[0]:
4795 4795 checkout(f)
4796 4796 if normal:
4797 4797 normal(f)
4798 4798
4799 4799 for f in add[0]:
4800 4800 checkout(f)
4801 4801 repo.dirstate.add(f)
4802 4802
4803 4803 normal = repo.dirstate.normallookup
4804 4804 if node == parent and p2 == nullid:
4805 4805 normal = repo.dirstate.normal
4806 4806 for f in undelete[0]:
4807 4807 checkout(f)
4808 4808 normal(f)
4809 4809
4810 4810 finally:
4811 4811 wlock.release()
4812 4812
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (dangerous)

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    - commit
    - import
    - pull
    - push (with this repository as the destination)
    - unbundle

    It's possible to lose data with rollback: commit, update back to
    an older changeset, and then rollback. The update removes the
    changes you committed from the working directory, and rollback
    removes them from history. To avoid data loss, you must pass
    --force in this case.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # All the real work happens in localrepo.rollback; this is a thin
    # command-table shim that just forwards the two relevant flags.
    dryrun = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dryrun, force=force)
4852 4852
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # repo.root is an absolute path; emit it followed by a newline.
    ui.write("%s\n" % repo.root)
4862 4862
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
      _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'))
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio and --cmdserver select non-HTTP transports; they are
    # mutually exclusive with each other (handled first, below).
    if opts["stdio"] and opts["cmdserver"]:
        raise util.Abort(_("cannot use --stdio with --cmdserver"))

    def checkrepo():
        # both the ssh server and the command server require a local repo;
        # hgweb below can instead serve a directory of repos via --web-conf
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))

    if opts["stdio"]:
        checkrepo()
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    if opts["cmdserver"]:
        checkrepo()
        s = commandserver.server(ui, repo, opts["cmdserver"])
        return s.serve()

    # this way we can check if something was given in the command-line
    if opts.get('port'):
        opts['port'] = util.getport(opts.get('port'))

    baseui = repo and repo.baseui or ui
    # copy command-line overrides into the [web] config section so
    # hgweb picks them up through the normal config machinery
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for o in optlist.split():
        val = opts.get(o, '')
        if val in (None, ''): # should check against default options instead
            continue
        baseui.setconfig("web", o, val)
        if repo and repo.ui != baseui:
            repo.ui.setconfig("web", o, val)

    # o becomes either a hgweb config file path or a repo root to serve
    o = opts.get('web_conf') or opts.get('webdir_conf')
    if not o:
        if not repo:
            raise error.RepoError(_("There is no Mercurial repository"
                                    " here (.hg not found)"))
        o = repo.root

    app = hgweb.hgweb(o, baseui=ui)

    class service(object):
        # adapter object consumed by cmdutil.service (handles --daemon etc.)
        def init(self):
            util.setsignalhandler()
            self.httpd = hgweb.server.create_server(ui, app)

            # only announce the address when the user asked for a fixed
            # port quietly or wants verbose output
            if opts['port'] and not ui.verbose:
                return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            if opts['port']:
                write = ui.status
            else:
                write = ui.write
            write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                  (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
4995 4995
@command('showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options'))],
    _('[-u] [NAME]...'))
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --debug, the source (filename and line number) is printed
    for each config item.

    Returns 0 on success.
    """

    # list every hgrc consulted (visible with --debug)
    for path in scmutil.rcpath():
        ui.debug('read config from: %s\n' % path)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one full "section.name" item may be requested
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or (items and sections):
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        display = str(value).replace('\n', '\\n')
        fullname = '.'.join([section, name])
        if not values:
            # no filter: dump everything as section.name=value
            ui.debug('%s: ' % ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (fullname, display))
            continue
        for v in values:
            if v == section:
                # whole-section match: keep the name prefix
                ui.debug('%s: ' % ui.configsource(section, name, untrusted))
                ui.write('%s=%s\n' % (fullname, display))
            elif v == fullname:
                # exact item match: print the bare value
                ui.debug('%s: ' % ui.configsource(section, name, untrusted))
                ui.write(display, '\n')
5041 5041
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::
       status may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file listed as A (added)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a changeset::

          hg status --rev 9353

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change both pick the comparison base; they conflict
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        # --change REV compares REV against its first parent
        node2 = repo.lookup(change)
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    # order matches the tuples returned by repo.status and the 'MAR!?IC'
    # status-letter string below
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # default selection: -mard, plus unknown unless --quiet
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        # build the copy-source map so added files can show their origin
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd),
                         label='status.' + state)
                if f in copy:
                    # origin line, indented under the A entry
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end),
                             label='status.copied')
5165 5165
@command('^summary|sum',
    [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
                 label='log.changeset')
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            # nullrev parent: nothing is checked out
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        ui.write('\n')
        if p.description():
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    m = _('branch: %s\n') % branch
    if branch != 'default':
        ui.write(m, label='log.branch')
    else:
        # 'default' is only shown in verbose/non-quiet mode
        ui.status(m, label='log.branch')

    if marks:
        current = repo._bookmarkcurrent
        ui.write(_('bookmarks:'), label='log.bookmark')
        if current is not None:
            try:
                marks.remove(current)
                ui.write(' *' + current, label='bookmarks.current')
            except ValueError:
                # current bookmark not in parent ctx marks
                pass
        for m in marks:
            ui.write(' ' + m, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    # st starts as [modified, added, removed, deleted, unknown, ignored]
    st = list(repo.status(unknown=True))[:6]

    # reclassify dirstate copies: a copy whose source was removed is a
    # rename; drop rename/copy destinations from the plain "added" list
    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in st[2]:
            st[2].remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in st[1]:
            st[1].remove(d)
    st.insert(3, renamed)
    st.insert(4, copied)

    # unresolved merge conflicts
    ms = mergemod.mergestate(repo)
    st.append([f for f in ms if ms[f] == 'u'])

    # dirty subrepositories
    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
    st.append(subs)

    # labels are positionally paired with the st entries built above
    labels = [ui.label(_('%d modified'), 'status.modified'),
              ui.label(_('%d added'), 'status.added'),
              ui.label(_('%d removed'), 'status.removed'),
              ui.label(_('%d renamed'), 'status.copied'),
              ui.label(_('%d copied'), 'status.copied'),
              ui.label(_('%d deleted'), 'status.deleted'),
              ui.label(_('%d unknown'), 'status.unknown'),
              ui.label(_('%d ignored'), 'status.ignored'),
              ui.label(_('%d unresolved'), 'resolve.unresolved'),
              ui.label(_('%d subrepos'), 'status.modified')]
    t = []
    for s, l in zip(st, labels):
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    if len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].extra().get('close') and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if cleanworkdir:
        ui.status(_('commit: %s\n') % t.strip())
    else:
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = [0] * len(repo)
    cl = repo.changelog
    for a in [cl.rev(n) for n in bheads]:
        new[a] = 1
    for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
        new[a] = 1
    for a in [p.rev() for p in parents]:
        if a >= 0:
            new[a] = 0
    for a in cl.ancestors(*[p.rev() for p in parents]):
        new[a] = 0
    new = sum(new)

    if new == 0:
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    if opts.get('remote'):
        t = []
        source, branches = hg.parseurl(ui.expandpath('default'))
        other = hg.peer(repo, {}, source)
        revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        # buffer discovery chatter; only the summary line is wanted
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other)
        _common, incoming, _rheads = commoninc
        repo.ui.popbuffer()
        if incoming:
            t.append(_('1 or more incoming'))

        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            # push target differs from pull source: redo discovery there
            other = hg.peer(repo, {}, dest)
            commoninc = None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        repo.ui.pushbuffer()
        common, outheads = discovery.findcommonoutgoing(repo, other,
                                                        commoninc=commoninc)
        repo.ui.popbuffer()
        o = repo.changelog.findmissing(common=common, heads=outheads)
        if o:
            t.append(_('%d outgoing') % len(o))
        if 'bookmarks' in other.listkeys('namespaces'):
            lmarks = repo.listkeys('bookmarks')
            rmarks = other.listkeys('bookmarks')
            diff = set(rmarks) - set(lmarks)
            if len(diff) > 0:
                t.append(_('%d incoming bookmarks') % len(diff))
            diff = set(lmarks) - set(rmarks)
            if len(diff) > 0:
                t.append(_('%d outgoing bookmarks') % len(diff))

        if t:
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            ui.status(_('remote: (synced)\n'))
5344 5344
@command('tag',
    [('f', 'force', None, _('force tag')),
    ('l', 'local', None, _('make the tag local')),
    ('r', 'rev', '', _('revision to tag'), _('REV')),
    ('', 'remove', None, _('remove a tag')),
    # -l/--local is already there, commitopts cannot be used
    ('e', 'edit', None, _('edit commit message')),
    ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
    ] + commitopts2,
    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """

    # default target: the working directory parent
    rev_ = "."
    names = [t.strip() for t in (name1,) + names]
    if len(names) != len(set(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        # 'tip', '.' and 'null' have built-in meaning in revision lookup
        if n in ['tip', '.', 'null']:
            raise util.Abort(_("the name '%s' is reserved") % n)
        if not n:
            raise util.Abort(_('tag names cannot consist entirely of whitespace'))
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        # removal only touches tags of the matching kind (local vs global)
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_("tag '%s' does not exist") % n)
            if repo.tagtype(n) != expectedtype:
                if expectedtype == 'global':
                    raise util.Abort(_("tag '%s' is not a global tag") % n)
                else:
                    raise util.Abort(_("tag '%s' is not a local tag") % n)
        # a tag is removed by re-tagging it to the null node
        rev_ = nullid
        if not message:
            # we don't translate commit messages
            message = 'Removed tag %s' % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_("tag '%s' already exists "
                                   "(use -f to force)") % n)
    if not opts.get('local'):
        # a global tag creates a commit, so the usual commit
        # preconditions apply: no uncommitted merge, parent at a head
        p1, p2 = repo.dirstate.parents()
        if p2 != nullid:
            raise util.Abort(_('uncommitted merge'))
        bheads = repo.branchheads()
        if not opts.get('force') and bheads and p1 not in bheads:
            raise util.Abort(_('not at a branch head (use -f to force)'))
    r = scmutil.revsingle(repo, rev_).node()

    if not message:
        # we don't translate commit messages
        message = ('Added tag %s for changeset %s' %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    if opts.get('edit'):
        message = ui.edit(message, ui.username())

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5443 5443
@command('tags', [], '')
def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.

    Returns 0 on success.
    """

    # full hashes with --debug, short ones otherwise
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    tagtype = ""

    for name, n in reversed(repo.tagslist()):
        if ui.quiet:
            ui.write("%s\n" % name, label='tags.normal')
            continue

        hn = hexfunc(n)
        rev = ui.label("%5d:%s" % (repo.changelog.rev(n), hn),
                       'log.changeset')
        # pad tag names to a 30-column field
        pad = " " * (30 - encoding.colwidth(name))

        tag = ui.label(name, 'tags.normal')
        if ui.verbose:
            if repo.tagtype(name) == 'local':
                tagtype = " local"
                tag = ui.label(name, 'tags.local')
            else:
                tagtype = ""
        ui.write("%s%s %s%s\n" % (tag, pad, rev, tagtype))
5475 5475
@command('tip',
    [('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ] + templateopts,
    _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    Returns 0 on success.
    """
    # the tip is simply the highest-numbered revision
    tipctx = repo[len(repo) - 1]
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(tipctx)
    displayer.close()
5498 5498
@command('unbundle',
    [('u', 'update', None,
     _('update to new branch head if changesets were unbundled'))],
    _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    # take the repo lock once for all bundles; remember the working
    # directory parent so the current bookmark can follow it
    lock = repo.lock()
    wc = repo['.']
    try:
        for bundlepath in fnames:
            fobj = url.open(ui, bundlepath)
            cg = changegroup.readbundle(fobj, bundlepath)
            modheads = repo.addchangegroup(cg, 'unbundle',
                                           'bundle:' + bundlepath, lock=lock)
            bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
    finally:
        lock.release()
    return postincoming(ui, repo, modheads, opts.get('update'), None)
5525 5525
@command('^update|up|checkout|co',
    [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
    ('c', 'check', None,
     _('update across branches if no uncommitted changes')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revision'), _('REV'))],
    _('[-c] [-C] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch.

    If the changeset is not a descendant of the working directory's
    parent, the update is aborted. With the -c/--check option, the
    working directory is checked for uncommitted changes; if none are
    found, the working directory is updated to the specified
    changeset.

    Update sets the working directory's parent revison to the specified
    changeset (see :hg:`help parents`).

    The following rules apply when the working directory contains
    uncommitted changes:

    1. If neither -c/--check nor -C/--clean is specified, and if
       the requested changeset is an ancestor or descendant of
       the working directory's parent, the uncommitted changes
       are merged into the requested changeset and the merged
       result is left uncommitted. If the requested changeset is
       not an ancestor or descendant (that is, it is on another
       branch), the update is aborted and the uncommitted changes
       are preserved.

    2. With the -c/--check option, the update is aborted and the
       uncommitted changes are preserved.

    3. With the -C/--clean option, uncommitted changes are discarded and
       the working directory is updated to the requested changeset.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # REV may arrive either positionally (node) or via -r; not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if rev is None or rev == '':
        rev = node

    # if we defined a bookmark, we have to remember the original bookmark name
    brev = rev
    rev = scmutil.revsingle(repo, rev, rev).rev()

    if check and clean:
        raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))

    if check:
        # we could use dirty() but we can ignore merge and branch trivia
        c = repo[None]
        if c.modified() or c.added() or c.removed():
            raise util.Abort(_("uncommitted local changes"))

    if date:
        if rev is not None:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean or check:
        # -C discards local changes; -c was already verified clean above,
        # so hg.clean is safe for both
        ret = hg.clean(repo, rev)
    else:
        ret = hg.update(repo, rev)

    if brev in repo._bookmarks:
        # updating to a bookmark by name makes it the active bookmark
        bookmarks.setcurrent(repo, brev)

    return ret
5610 5610
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Returns 0 on success, 1 if errors are encountered.
    """
    # the actual checking lives in mercurial.verify, reached via hg.verify
    return hg.verify(repo)
5625 5625
@command('version', [])
def version_(ui):
    """output version and copyright information"""
    # trailing underscore avoids shadowing other 'version' names
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    # copyright notice goes through status() so --quiet suppresses it
    ui.status(_(
        "(see http://mercurial.selenic.com for more information)\n"
        "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
5638 5638
# space-separated list of commands that must work without a repository
norepo = ("clone init version help debugcommands debugcomplete"
          " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
          " debugknown debuggetbundle debugbundle")
# commands for which a repository is used when present but not required
optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
                " debugdata debugindex debugindexdot debugrevlog")
@@ -1,578 +1,578
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from i18n import _
10 10 from lock import release
11 11 from node import hex, nullid
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
13 13 import lock, util, extensions, error, node
14 14 import cmdutil, discovery
15 15 import merge as mergemod
16 16 import verify as verifymod
17 17 import errno, os, shutil
18 18
def _local(path):
    """Return the repo module serving a local path.

    A plain file is treated as a bundle, a directory as a local
    repository.
    """
    localpath = util.expandpath(util.urllocalpath(path))
    if os.path.isfile(localpath):
        return bundlerepo
    return localrepo
22 22
def addbranchrevs(lrepo, repo, branches, revs):
    # lrepo: local repo used to resolve the '.' pseudo-branch name
    # repo: (possibly remote) peer whose branchmap supplies branch heads
    # branches: (hashbranch, branches) pair as produced by parseurl()
    # returns (revs, checkout) where checkout is the rev to update to
    hashbranch, branches = branches
    if not hashbranch and not branches:
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not repo.capable('branchmap'):
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        # no branchmap capability: pass the name through unresolved and
        # let later lookup treat it as a revision
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = repo.branchmap()

    def primary(branch):
        # expand a branch name into its heads; returns True iff the
        # name was a known branch
        if branch == '.':
            if not lrepo or not lrepo.local():
                raise util.Abort(_("dirstate branch not accessible"))
            branch = lrepo.dirstate.branch()
        if branch in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
            return True
        else:
            return False

    for branch in branches:
        if not primary(branch):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        # the URL '#fragment' may name either a branch or a raw revision
        if not primary(hashbranch):
            revs.append(hashbranch)
    return revs, revs[0]
53 53
def parseurl(path, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    parsed = util.url(path)
    if parsed.fragment:
        # '#name' selects a branch; strip it from the URL proper
        branch, parsed.fragment = parsed.fragment, None
    else:
        branch = None
    return str(parsed), (branch, branches or [])
63 63
# map of URL scheme to the module implementing that repository type;
# 'file' maps to a helper that picks bundlerepo or localrepo per path
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
}
72 72
def _peerlookup(path):
    """Return the repo module (or object) handling *path*'s URL scheme."""
    scheme = util.url(path).scheme or 'file'
    handler = schemes.get(scheme) or schemes['file']
    try:
        # 'file' maps to a factory function that must be called with the
        # path to choose between bundle and local repositories
        return handler(path)
    except TypeError:
        # plain module: not callable, use it directly
        return handler
81 81
def islocal(repo):
    '''return true if repo or path is local'''
    # repo objects know directly; strings have to be resolved by scheme
    if not isinstance(repo, str):
        return repo.local()
    try:
        return _peerlookup(repo).islocal(repo)
    except AttributeError:
        # handler has no islocal() -> treat as non-local
        return False
90 90
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _peerlookup(path).instance(ui, path, create)
    # prefer the repo's own ui if it has one (it carries the repo config)
    ui = getattr(repo, "ui", ui)
    # let every loaded extension wrap or augment the freshly created repo
    for name, module in extensions.extensions():
        hook = getattr(module, 'reposetup', None)
        if hook:
            hook(ui, repo)
    return repo
100 100
def peer(uiorrepo, opts, path, create=False):
    '''return a repository peer for the specified path'''
    # build a ui seeded with remote-relevant options, then open the repo
    return repository(remoteui(uiorrepo, opts), path, create)
105 105
def defaultdest(source):
    '''return default destination of clone if none is given'''
    # normalize first so a trailing separator ('foo/bar/') still
    # yields the last path component ('bar')
    normalized = os.path.normpath(source)
    return os.path.basename(normalized)
109 109
110 110 def share(ui, source, dest=None, update=True):
111 111 '''create a shared repository'''
112 112
113 113 if not islocal(source):
114 114 raise util.Abort(_('can only share local repositories'))
115 115
116 116 if not dest:
117 117 dest = defaultdest(source)
118 118 else:
119 119 dest = ui.expandpath(dest)
120 120
121 121 if isinstance(source, str):
122 122 origsource = ui.expandpath(source)
123 123 source, branches = parseurl(origsource)
124 124 srcrepo = repository(ui, source)
125 125 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
126 126 else:
127 127 srcrepo = source
128 128 origsource = source = srcrepo.url()
129 129 checkout = None
130 130
131 131 sharedpath = srcrepo.sharedpath # if our source is already sharing
132 132
133 root = os.path.realpath(dest)
133 root = util.realpath(dest)
134 134 roothg = os.path.join(root, '.hg')
135 135
136 136 if os.path.exists(roothg):
137 137 raise util.Abort(_('destination already exists'))
138 138
139 139 if not os.path.isdir(root):
140 140 os.mkdir(root)
141 141 util.makedir(roothg, notindexed=True)
142 142
143 143 requirements = ''
144 144 try:
145 145 requirements = srcrepo.opener.read('requires')
146 146 except IOError, inst:
147 147 if inst.errno != errno.ENOENT:
148 148 raise
149 149
150 150 requirements += 'shared\n'
151 151 util.writefile(os.path.join(roothg, 'requires'), requirements)
152 152 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
153 153
154 154 r = repository(ui, root)
155 155
156 156 default = srcrepo.ui.config('paths', 'default')
157 157 if default:
158 158 fp = r.opener("hgrc", "w", text=True)
159 159 fp.write("[paths]\n")
160 160 fp.write("default = %s\n" % default)
161 161 fp.close()
162 162
163 163 if update:
164 164 r.ui.status(_("updating working directory\n"))
165 165 if update is not True:
166 166 checkout = update
167 167 for test in (checkout, 'default', 'tip'):
168 168 if test is None:
169 169 continue
170 170 try:
171 171 uprev = r.lookup(test)
172 172 break
173 173 except error.RepoLookupError:
174 174 continue
175 175 _update(r, uprev)
176 176
def copystore(ui, srcrepo, destpath):
    '''copy files from store of srcrepo in destpath

    returns destlock
    '''
    destlock = None
    try:
        # hardlink is tri-state: None means "try hardlinking", after the
        # first copy it remembers whether hardlinking actually worked
        hardlink = None
        num = 0
        for f in srcrepo.store.copylist():
            src = os.path.join(srcrepo.sharedpath, f)
            dst = os.path.join(destpath, f)
            dstbase = os.path.dirname(dst)
            if dstbase and not os.path.exists(dstbase):
                os.mkdir(dstbase)
            if os.path.exists(src):
                if dst.endswith('data'):
                    # lock to avoid premature writing to the target
                    destlock = lock.lock(os.path.join(dstbase, "lock"))
                hardlink, n = util.copyfiles(src, dst, hardlink)
                num += n
        if hardlink:
            ui.debug("linked %d files\n" % num)
        else:
            ui.debug("copied %d files\n" % num)
        return destlock
    except:
        # deliberately broad: release the destination lock on any failure
        # (including KeyboardInterrupt) before re-raising
        release(destlock)
        raise
206 206
207 207 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
208 208 update=True, stream=False, branch=None):
209 209 """Make a copy of an existing repository.
210 210
211 211 Create a copy of an existing repository in a new directory. The
212 212 source and destination are URLs, as passed to the repository
213 213 function. Returns a pair of repository objects, the source and
214 214 newly created destination.
215 215
216 216 The location of the source is added to the new repository's
217 217 .hg/hgrc file, as the default to be used for future pulls and
218 218 pushes.
219 219
220 220 If an exception is raised, the partly cloned/updated destination
221 221 repository will be deleted.
222 222
223 223 Arguments:
224 224
225 225 source: repository object or URL
226 226
227 227 dest: URL of destination repository to create (defaults to base
228 228 name of source repository)
229 229
230 230 pull: always pull from source repository, even in local case
231 231
232 232 stream: stream raw data uncompressed from repository (fast over
233 233 LAN, slow over WAN)
234 234
235 235 rev: revision to clone up to (implies pull=True)
236 236
237 237 update: update working directory after clone completes, if
238 238 destination is local repository (True means update to default rev,
239 239 anything else is treated as a revision)
240 240
241 241 branch: branches to clone
242 242 """
243 243
244 244 if isinstance(source, str):
245 245 origsource = ui.expandpath(source)
246 246 source, branch = parseurl(origsource, branch)
247 247 srcrepo = repository(remoteui(ui, peeropts), source)
248 248 else:
249 249 srcrepo = source
250 250 branch = (None, branch or [])
251 251 origsource = source = srcrepo.url()
252 252 rev, checkout = addbranchrevs(srcrepo, srcrepo, branch, rev)
253 253
254 254 if dest is None:
255 255 dest = defaultdest(source)
256 256 ui.status(_("destination directory: %s\n") % dest)
257 257 else:
258 258 dest = ui.expandpath(dest)
259 259
260 260 dest = util.urllocalpath(dest)
261 261 source = util.urllocalpath(source)
262 262
263 263 if os.path.exists(dest):
264 264 if not os.path.isdir(dest):
265 265 raise util.Abort(_("destination '%s' already exists") % dest)
266 266 elif os.listdir(dest):
267 267 raise util.Abort(_("destination '%s' is not empty") % dest)
268 268
269 269 class DirCleanup(object):
270 270 def __init__(self, dir_):
271 271 self.rmtree = shutil.rmtree
272 272 self.dir_ = dir_
273 273 def close(self):
274 274 self.dir_ = None
275 275 def cleanup(self):
276 276 if self.dir_:
277 277 self.rmtree(self.dir_, True)
278 278
279 279 srclock = destlock = dircleanup = None
280 280 try:
281 281 abspath = origsource
282 282 if islocal(origsource):
283 283 abspath = os.path.abspath(util.urllocalpath(origsource))
284 284
285 285 if islocal(dest):
286 286 dircleanup = DirCleanup(dest)
287 287
288 288 copy = False
289 289 if srcrepo.cancopy() and islocal(dest):
290 290 copy = not pull and not rev
291 291
292 292 if copy:
293 293 try:
294 294 # we use a lock here because if we race with commit, we
295 295 # can end up with extra data in the cloned revlogs that's
296 296 # not pointed to by changesets, thus causing verify to
297 297 # fail
298 298 srclock = srcrepo.lock(wait=False)
299 299 except error.LockError:
300 300 copy = False
301 301
302 302 if copy:
303 303 srcrepo.hook('preoutgoing', throw=True, source='clone')
304 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
304 hgdir = util.realpath(os.path.join(dest, ".hg"))
305 305 if not os.path.exists(dest):
306 306 os.mkdir(dest)
307 307 else:
308 308 # only clean up directories we create ourselves
309 309 dircleanup.dir_ = hgdir
310 310 try:
311 311 destpath = hgdir
312 312 util.makedir(destpath, notindexed=True)
313 313 except OSError, inst:
314 314 if inst.errno == errno.EEXIST:
315 315 dircleanup.close()
316 316 raise util.Abort(_("destination '%s' already exists")
317 317 % dest)
318 318 raise
319 319
320 320 destlock = copystore(ui, srcrepo, destpath)
321 321
322 322 # we need to re-init the repo after manually copying the data
323 323 # into it
324 324 destrepo = repository(remoteui(ui, peeropts), dest)
325 325 srcrepo.hook('outgoing', source='clone',
326 326 node=node.hex(node.nullid))
327 327 else:
328 328 try:
329 329 destrepo = repository(remoteui(ui, peeropts), dest,
330 330 create=True)
331 331 except OSError, inst:
332 332 if inst.errno == errno.EEXIST:
333 333 dircleanup.close()
334 334 raise util.Abort(_("destination '%s' already exists")
335 335 % dest)
336 336 raise
337 337
338 338 revs = None
339 339 if rev:
340 340 if not srcrepo.capable('lookup'):
341 341 raise util.Abort(_("src repository does not support "
342 342 "revision lookup and so doesn't "
343 343 "support clone by revision"))
344 344 revs = [srcrepo.lookup(r) for r in rev]
345 345 checkout = revs[0]
346 346 if destrepo.local():
347 347 destrepo.clone(srcrepo, heads=revs, stream=stream)
348 348 elif srcrepo.local():
349 349 srcrepo.push(destrepo, revs=revs)
350 350 else:
351 351 raise util.Abort(_("clone from remote to remote not supported"))
352 352
353 353 if dircleanup:
354 354 dircleanup.close()
355 355
356 356 if destrepo.local():
357 357 fp = destrepo.opener("hgrc", "w", text=True)
358 358 fp.write("[paths]\n")
359 359 fp.write("default = %s\n" % abspath)
360 360 fp.close()
361 361
362 362 destrepo.ui.setconfig('paths', 'default', abspath)
363 363
364 364 if update:
365 365 if update is not True:
366 366 checkout = update
367 367 if srcrepo.local():
368 368 checkout = srcrepo.lookup(update)
369 369 for test in (checkout, 'default', 'tip'):
370 370 if test is None:
371 371 continue
372 372 try:
373 373 uprev = destrepo.lookup(test)
374 374 break
375 375 except error.RepoLookupError:
376 376 continue
377 377 bn = destrepo[uprev].branch()
378 378 destrepo.ui.status(_("updating to branch %s\n") % bn)
379 379 _update(destrepo, uprev)
380 380
381 381 # clone all bookmarks
382 382 if destrepo.local() and srcrepo.capable("pushkey"):
383 383 rb = srcrepo.listkeys('bookmarks')
384 384 for k, n in rb.iteritems():
385 385 try:
386 386 m = destrepo.lookup(n)
387 387 destrepo._bookmarks[k] = m
388 388 except error.RepoLookupError:
389 389 pass
390 390 if rb:
391 391 bookmarks.write(destrepo)
392 392 elif srcrepo.local() and destrepo.capable("pushkey"):
393 393 for k, n in srcrepo._bookmarks.iteritems():
394 394 destrepo.pushkey('bookmarks', k, '', hex(n))
395 395
396 396 return srcrepo, destrepo
397 397 finally:
398 398 release(srclock, destlock)
399 399 if dircleanup is not None:
400 400 dircleanup.cleanup()
401 401
def _showstats(repo, stats):
    """Report the four update counters from *stats* on repo's ui."""
    updated, merged, removed, unresolved = stats
    repo.ui.status(_("%d files updated, %d files merged, "
                     "%d files removed, %d files unresolved\n")
                   % (updated, merged, removed, unresolved))
405 405
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    # branchmerge=False, force=False: linear update that merges
    # uncommitted working directory changes into the target
    stats = mergemod.update(repo, node, False, False, None)
    _showstats(repo, stats)
    if stats[3]:
        # stats[3] counts unresolved files
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return stats[3] > 0

# naming conflict in clone()
_update = update
416 416
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    # force=True discards local modifications instead of merging them
    stats = mergemod.update(repo, node, False, True, None)
    unresolved = stats[3]
    if show_stats:
        _showstats(repo, stats)
    return unresolved > 0
423 423
def merge(repo, node, force=None, remind=True):
    """Branch merge with node, resolving changes. Return true if any
    unresolved conflicts."""
    # branchmerge=True: real merge with the working directory's parent
    stats = mergemod.update(repo, node, True, force, False)
    _showstats(repo, stats)
    if stats[3]:
        # stats[3] counts unresolved files
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C .' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return stats[3] > 0
435 435
def _incoming(displaychlist, subreporecurse, ui, repo, source,
              opts, buffered=False):
    """
    Helper for incoming / gincoming.
    displaychlist gets called with
    (remoterepo, incomingchangesetlist, displayer) parameters,
    and is supposed to contain only code that can't be unified.
    """
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = peer(repo, opts, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))

    if revs:
        revs = [other.lookup(rev) for rev in revs]
    # getremotechanges may pull incoming changesets into a temporary
    # local bundle; cleanupfn removes it when we are done
    other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
                                revs, opts["bundle"], opts["force"])
    try:
        if not chlist:
            ui.status(_("no changes found\n"))
            return subreporecurse()

        displayer = cmdutil.show_changeset(ui, other, opts, buffered)

        # XXX once graphlog extension makes it into core,
        # should be replaced by a if graph/else
        displaychlist(other, chlist, displayer)

        displayer.close()
    finally:
        cleanupfn()
    subreporecurse()
    return 0 # exit code is zero since we found incoming changes
469 469
def incoming(ui, repo, source, opts):
    # returns 0 if incoming changes were found, 1 otherwise
    def subreporecurse():
        # recurse into subrepos; overall result is 0 if any of them
        # (or the parent) found changes
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.incoming(ui, source, opts))
        return ret

    def display(other, chlist, displayer):
        # show up to --limit changesets, optionally newest first,
        # optionally skipping merges
        limit = cmdutil.loglimit(opts)
        if opts.get('newest_first'):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    return _incoming(display, subreporecurse, ui, repo, source, opts)
494 494
def _outgoing(ui, repo, dest, opts):
    # compute the list of outgoing changeset nodes, or None if there
    # are none to report
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = parseurl(dest, opts.get('branch'))
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = peer(repo, opts, dest)
    common, outheads = discovery.findcommonoutgoing(repo, other, revs,
                                                    force=opts.get('force'))
    o = repo.changelog.findmissing(common, outheads)
    if not o:
        ui.status(_("no changes found\n"))
        return None
    return o
511 511
def outgoing(ui, repo, dest, opts):
    # returns 0 if outgoing changes were found, 1 otherwise
    def recurse():
        # recurse into subrepos; overall result is 0 if any of them
        # (or the parent) found changes
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.outgoing(ui, dest, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    o = _outgoing(ui, repo, dest, opts)
    if o is None:
        return recurse()

    # show up to --limit changesets, optionally newest first,
    # optionally skipping merges
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if limit is not None and count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
    displayer.close()
    recurse()
    return 0 # exit code is zero since we found outgoing changes
542 542
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    # force=True clobbers local changes; 'choose' selects the files
    stats = mergemod.update(repo, node, False, True, choose)
    return stats[3] > 0
546 546
def verify(repo):
    """verify the consistency of a repository"""
    # thin wrapper kept so callers don't import mercurial.verify directly
    return verifymod.verify(repo)
550 550
def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    if util.safehasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # copy ssh-specific options
    for o in 'ssh', 'remotecmd':
        # command-line options take precedence over configuration
        v = opts.get(o) or src.config('ui', o)
        if v:
            dst.setconfig("ui", o, v)

    # copy bundle-specific options
    r = src.config('bundle', 'mainreporoot')
    if r:
        dst.setconfig('bundle', 'mainreporoot', r)

    # copy selected local settings to the remote ui
    for sect in ('auth', 'hostfingerprints', 'http_proxy'):
        for key, val in src.configitems(sect):
            dst.setconfig(sect, key, val)
    v = src.config('web', 'cacerts')
    if v:
        # expand ~user etc. so the remote ui sees an absolute cert path
        dst.setconfig('web', 'cacerts', util.expandpath(v))

    return dst
@@ -1,2101 +1,2101
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 12 import lock, transaction, store, encoding
13 13 import scmutil, util, extensions, hook, error, revset
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 from lock import release
18 18 import weakref, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 20 filecache = scmutil.filecache
21 21
22 22 class localrepository(repo.repository):
23 23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 24 'known', 'getbundle'))
25 25 supportedformats = set(('revlogv1', 'generaldelta'))
26 26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 27 'dotencode'))
28 28
    def __init__(self, baseui, path=None, create=False):
        """Open the repository at path, or initialize one if create is True.

        Reads .hg/requires, honours .hg/sharedpath, and wires up the
        store/working-directory openers. Raises error.RepoError when the
        repository is missing (or, with create=True, already exists).
        """
        repo.repository.__init__(self)
        # util.realpath (not os.path.realpath) so platform quirks such as
        # case-insensitive filesystems are handled consistently
        self.root = util.realpath(util.expandpath(path))
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.auditor = scmutil.pathauditor(self.root, self._checknested)
        self.opener = scmutil.opener(self.path)
        self.wopener = scmutil.opener(self.root)
        self.baseui = baseui
        self.ui = baseui.copy()

        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no .hg/hgrc is fine; run with the inherited configuration
            pass

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    util.makedirs(path)
                util.makedir(self.path, notindexed=True)
                requirements = ["revlogv1"]
                if self.ui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                # create an invalid changelog
                self.opener.append(
                    "00changelog.i",
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.opener, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # missing 'requires' file: treat as an old-format repo
                requirements = set()

        # a shared repo stores its history under another repo's .hg
        self.sharedpath = self.path
        try:
            s = util.realpath(self.opener.read("sharedpath").rstrip('\n'))
            if not os.path.exists(s):
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcache = None
        self._branchcachetip = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}
112 112
113 113 def _applyrequirements(self, requirements):
114 114 self.requirements = requirements
115 115 openerreqs = set(('revlogv1', 'generaldelta'))
116 116 self.sopener.options = dict((r, 1) for r in requirements
117 117 if r in openerreqs)
118 118
119 119 def _writerequirements(self):
120 120 reqfile = self.opener("requires", "w")
121 121 for r in self.requirements:
122 122 reqfile.write("%s\n" % r)
123 123 reqfile.close()
124 124
125 125 def _checknested(self, path):
126 126 """Determine if path is a legal nested repository."""
127 127 if not path.startswith(self.root):
128 128 return False
129 129 subpath = path[len(self.root) + 1:]
130 130
131 131 # XXX: Checking against the current working copy is wrong in
132 132 # the sense that it can reject things like
133 133 #
134 134 # $ hg cat -r 10 sub/x.txt
135 135 #
136 136 # if sub/ is no longer a subrepository in the working copy
137 137 # parent revision.
138 138 #
139 139 # However, it can of course also allow things that would have
140 140 # been rejected before, such as the above cat command if sub/
141 141 # is a subrepository now, but was a normal directory before.
142 142 # The old path auditor would have rejected by mistake since it
143 143 # panics when it sees sub/.hg/.
144 144 #
145 145 # All in all, checking against the working copy seems sensible
146 146 # since we want to prevent access to nested repositories on
147 147 # the filesystem *now*.
148 148 ctx = self[None]
149 149 parts = util.splitpath(subpath)
150 150 while parts:
151 151 prefix = os.sep.join(parts)
152 152 if prefix in ctx.substate:
153 153 if prefix == subpath:
154 154 return True
155 155 else:
156 156 sub = ctx.sub(prefix)
157 157 return sub.checknested(subpath[len(prefix) + 1:])
158 158 else:
159 159 parts.pop()
160 160 return False
161 161
    @filecache('bookmarks')
    def _bookmarks(self):
        """Mapping of bookmark name to node, cached against the
        .hg/bookmarks file via the filecache decorator."""
        return bookmarks.read(self)
165 165
    @filecache('bookmarks.current')
    def _bookmarkcurrent(self):
        """Name of the active bookmark (or None), cached against the
        .hg/bookmarks.current file."""
        return bookmarks.readcurrent(self)
169 169
    def _writebookmarks(self, marks):
        # NOTE(review): 'marks' is unused; bookmarks.write reads the state
        # from the repo itself — confirm before relying on the parameter.
        bookmarks.write(self)
172 172
    @filecache('00changelog.i', True)
    def changelog(self):
        """The repository changelog, cached against 00changelog.i."""
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            # a transaction in this repo is in progress (e.g. a pretxn
            # hook): expose the not-yet-committed revisions too
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                c.readpending('00changelog.i.a')
        return c
181 181
    @filecache('00manifest.i', True)
    def manifest(self):
        """The repository manifest, cached against 00manifest.i."""
        return manifest.manifest(self.sopener)
185 185
186 186 @filecache('dirstate')
187 187 def dirstate(self):
188 188 warned = [0]
189 189 def validate(node):
190 190 try:
191 191 self.changelog.rev(node)
192 192 return node
193 193 except error.LookupError:
194 194 if not warned[0]:
195 195 warned[0] = True
196 196 self.ui.warn(_("warning: ignoring unknown"
197 197 " working parent %s!\n") % short(node))
198 198 return nullid
199 199
200 200 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
201 201
202 202 def __getitem__(self, changeid):
203 203 if changeid is None:
204 204 return context.workingctx(self)
205 205 return context.changectx(self, changeid)
206 206
207 207 def __contains__(self, changeid):
208 208 try:
209 209 return bool(self.lookup(changeid))
210 210 except error.RepoLookupError:
211 211 return False
212 212
    def __nonzero__(self):
        # a repository object is always truthy, even when empty
        return True
215 215
    def __len__(self):
        # number of revisions in the repository
        return len(self.changelog)
218 218
219 219 def __iter__(self):
220 220 for i in xrange(len(self)):
221 221 yield i
222 222
223 223 def set(self, expr, *args):
224 224 '''
225 225 Yield a context for each matching revision, after doing arg
226 226 replacement via revset.formatspec
227 227 '''
228 228
229 229 expr = revset.formatspec(expr, *args)
230 230 m = revset.match(None, expr)
231 231 for r in m(self, range(len(self))):
232 232 yield self[r]
233 233
    def url(self):
        # a local repository is always addressed with a file: URL
        return 'file:' + self.root
236 236
    def hook(self, name, throw=False, **args):
        """Run the named hook; with throw=True a failing hook raises."""
        return hook.hook(self.ui, self, name, throw, **args)
239 239
    # characters rejected in tag names by _tag() below
    tag_disallowed = ':\r\n'
241 241
    def _tag(self, names, node, message, local, user, date, extra={}):
        """Low-level tagging: validate names, fire hooks, and append the
        tag entries to .hg/localtags (local) or .hgtags (committed).

        names may be a single string or a sequence of names. Returns the
        node of the tagging commit, or None for local tags.
        """
        # NOTE(review): mutable default 'extra={}' — harmless here since it
        # is only passed through to commit(), never mutated.
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            # append entries at EOF, ensuring the previous content ends
            # with a newline first
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
                    # record the old value first so the new one overrides it
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
311 311
    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            # refuse to commit a tag on top of a dirty .hgtags; the user
            # must resolve pending .hgtags changes first
            for x in self.status()[:5]:
                if '.hgtags' in x:
                    raise util.Abort(_('working copy of .hgtags is changed '
                                       '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date)
341 341
    @propertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # derived caches, filled lazily by tagslist()/nodetags()
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
363 363
    def tags(self):
        '''return a mapping of tag to node'''
        return self._tagscache.tags
367 367
    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                try:
                    # ignore tags to unknown nodes
                    self.changelog.lookup(node)
                    tags[encoding.tolocal(name)] = node
                except error.LookupError:
                    pass
        # 'tip' is implicit and always present
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
405 405
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        return self._tagscache.tagtypes.get(tagname)
416 416
417 417 def tagslist(self):
418 418 '''return a list of tags ordered by revision'''
419 419 if not self._tagscache.tagslist:
420 420 l = []
421 421 for t, n in self.tags().iteritems():
422 422 r = self.changelog.rev(n)
423 423 l.append((r, t, n))
424 424 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
425 425
426 426 return self._tagscache.tagslist
427 427
428 428 def nodetags(self, node):
429 429 '''return the tags associated with a node'''
430 430 if not self._tagscache.nodetagscache:
431 431 nodetagscache = {}
432 432 for t, n in self.tags().iteritems():
433 433 nodetagscache.setdefault(n, []).append(t)
434 434 for tags in nodetagscache.itervalues():
435 435 tags.sort()
436 436 self._tagscache.nodetagscache = nodetagscache
437 437 return self._tagscache.nodetagscache.get(node, [])
438 438
439 439 def nodebookmarks(self, node):
440 440 marks = []
441 441 for bookmark, n in self._bookmarks.iteritems():
442 442 if n == node:
443 443 marks.append(bookmark)
444 444 return sorted(marks)
445 445
    def _branchtags(self, partial, lrev):
        # TODO: rename this function?
        # Extend the partial branch cache from lrev up to the current tip
        # and persist the refreshed cache to disk.
        tiprev = len(self) - 1
        if lrev != tiprev:
            ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
            self._updatebranchcache(partial, ctxgen)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
455 455
    def updatebranchcache(self):
        """Bring the in-memory branch-heads cache up to date with the tip.

        Cheap no-op when the cached tip still matches; otherwise replays
        new revisions on top of the cached (or on-disk) partial cache.
        """
        tip = self.changelog.tip()
        if self._branchcache is not None and self._branchcachetip == tip:
            return self._branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # cache is absent or was built on stripped history: reload
            partial, last, lrev = self._readbranchcache()
        else:
            lrev = self.changelog.rev(oldtip)
            partial = self._branchcache

        self._branchtags(partial, lrev)
        # this private cache holds all heads (not just tips)
        self._branchcache = partial
472 472
    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]}'''
        self.updatebranchcache()
        return self._branchcache
477 477
478 478 def branchtags(self):
479 479 '''return a dict where branch names map to the tipmost head of
480 480 the branch, open heads come before closed'''
481 481 bt = {}
482 482 for bn, heads in self.branchmap().iteritems():
483 483 tip = heads[-1]
484 484 for h in reversed(heads):
485 485 if 'close' not in self.changelog.read(h)[5]:
486 486 tip = h
487 487 break
488 488 bt[bn] = tip
489 489 return bt
490 490
    def _readbranchcache(self):
        """Read .hg/cache/branchheads from disk.

        Returns (partial, last, lrev): a {branch: [nodes]} dict plus the
        tip node/rev the cache was valid for.  Any problem (missing file,
        stale tip, parse error) degrades to an empty cache.
        """
        partial = {}
        try:
            f = self.opener("cache/branchheads")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            # first line records the tip the cache was computed against
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l:
                    continue
                node, label = l.split(" ", 1)
                label = encoding.tolocal(label.strip())
                partial.setdefault(label, []).append(bin(node))
        except KeyboardInterrupt:
            raise
        except Exception, inst:
            # corrupt cache: discard and rebuild from scratch
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
519 519
    def _writebranchcache(self, branches, tip, tiprev):
        """Write the branch-heads cache to disk (best effort: I/O errors
        are ignored since the cache can always be rebuilt)."""
        try:
            f = self.opener("cache/branchheads", "w", atomictemp=True)
            # header line: the tip this cache is valid for
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, nodes in branches.iteritems():
                for node in nodes:
                    f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
            f.close()
        except (IOError, OSError):
            pass
530 530
    def _updatebranchcache(self, partial, ctxgen):
        """Fold the changesets from ctxgen into the partial branch-heads
        cache, pruning entries that are no longer true heads."""
        # collect new branch entries
        newbranches = {}
        for c in ctxgen:
            newbranches.setdefault(c.branch(), []).append(c.node())
        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newnodes in newbranches.iteritems():
            bheads = partial.setdefault(branch, [])
            bheads.extend(newnodes)
            if len(bheads) <= 1:
                continue
            bheads = sorted(bheads, key=lambda x: self[x].rev())
            # starting from tip means fewer passes over reachable
            while newnodes:
                latest = newnodes.pop()
                if latest not in bheads:
                    continue
                minbhrev = self[bheads[0]].node()
                reachable = self.changelog.reachable(latest, minbhrev)
                reachable.remove(latest)
                if reachable:
                    # anything reachable from 'latest' is not a head
                    bheads = [b for b in bheads if b not in reachable]
            partial[branch] = bheads
556 556
    def lookup(self, key):
        """Resolve key to a changelog node.

        Resolution order matters: rev number, '.', 'null', 'tip', full
        hash, bookmark, tag, branch, then unambiguous hash prefix.
        Raises RepoLookupError when nothing matches.
        """
        if isinstance(key, int):
            return self.changelog.node(key)
        elif key == '.':
            return self.dirstate.p1()
        elif key == 'null':
            return nullid
        elif key == 'tip':
            return self.changelog.tip()
        n = self.changelog._match(key)
        if n:
            return n
        if key in self._bookmarks:
            return self._bookmarks[key]
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n

        # can't find key, check if it might have come from damaged dirstate
        if key in self.dirstate.parents():
            raise error.Abort(_("working directory has unknown parent '%s'!")
                              % short(key))
        try:
            if len(key) == 20:
                key = hex(key)
        except TypeError:
            pass
        raise error.RepoLookupError(_("unknown revision '%s'") % key)
589 589
590 590 def lookupbranch(self, key, remote=None):
591 591 repo = remote or self
592 592 if key in repo.branchmap():
593 593 return key
594 594
595 595 repo = (remote and remote.local()) and remote or self
596 596 return repo[key].branch()
597 597
598 598 def known(self, nodes):
599 599 nm = self.changelog.nodemap
600 600 return [(n in nm) for n in nodes]
601 601
    def local(self):
        # this *is* a local repository (peer classes return None/False)
        return self
604 604
    def join(self, f):
        """Join f onto the .hg directory path."""
        return os.path.join(self.path, f)
607 607
    def wjoin(self, f):
        """Join f onto the working-directory root path."""
        return os.path.join(self.root, f)
610 610
    def file(self, f):
        """Return the filelog for tracked file f (leading '/' stripped)."""
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)
615 615
    def changectx(self, changeid):
        # legacy alias for repo[changeid]
        return self[changeid]
618 618
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()
622 622
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
627 627
    def getcwd(self):
        """Current working directory, relative to the repo root."""
        return self.dirstate.getcwd()
630 630
    def pathto(self, f, cwd=None):
        """Repo-relative path f expressed relative to cwd."""
        return self.dirstate.pathto(f, cwd)
633 633
    def wfile(self, f, mode='r'):
        """Open file f from the working directory."""
        return self.wopener(f, mode)
636 636
    def _link(self, f):
        """True if working-directory file f is a symlink."""
        return os.path.islink(self.wjoin(f))
639 639
    def _loadfilter(self, filter):
        """Load and cache the (matcher, fn, params) triples for the given
        filter config section ('encode' or 'decode')."""
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    # '!' disables a filter inherited from broader config
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                # prefer a registered in-process data filter over a shell
                # command when the command string starts with its name
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]
663 663
664 664 def _filter(self, filterpats, filename, data):
665 665 for mf, fn, cmd in filterpats:
666 666 if mf(filename):
667 667 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
668 668 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
669 669 break
670 670
671 671 return data
672 672
    @propertycache
    def _encodefilterpats(self):
        # filters applied when reading from the working directory
        return self._loadfilter('encode')
676 676
    @propertycache
    def _decodefilterpats(self):
        # filters applied when writing to the working directory
        return self._loadfilter('decode')
680 680
    def adddatafilter(self, name, filter):
        """Register an in-process data filter under the given name."""
        self._datafilters[name] = filter
683 683
684 684 def wread(self, filename):
685 685 if self._link(filename):
686 686 data = os.readlink(self.wjoin(filename))
687 687 else:
688 688 data = self.wopener.read(filename)
689 689 return self._filter(self._encodefilterpats, filename, data)
690 690
    def wwrite(self, filename, data, flags):
        """Write data to the working directory, applying decode filters
        and honouring the 'l' (symlink) and 'x' (executable) flags."""
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener.write(filename, data)
            if 'x' in flags:
                util.setflags(self.wjoin(filename), False, True)
699 699
    def wwritedata(self, filename, data):
        """Apply decode filters to data without writing anything."""
        return self._filter(self._decodefilterpats, filename, data)
702 702
    def transaction(self, desc):
        """Open (or nest into) a store transaction described by desc.

        Only a weak reference to the transaction is retained, so the
        transaction is controlled by the caller's reference.
        """
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            # reuse the already-running transaction
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        journalfiles = self._writejournal(desc)
        renames = [(x, undoname(x)) for x in journalfiles]

        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
722 722
    def _writejournal(self, desc):
        """Snapshot rollback state (dirstate, branch, description,
        bookmarks) into journal.* files; return the journal file names
        so they can be renamed to undo.* after the transaction."""
        # save dirstate for rollback
        try:
            ds = self.opener.read("dirstate")
        except IOError:
            # no dirstate yet (fresh repo): journal an empty one
            ds = ""
        self.opener.write("journal.dirstate", ds)
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))

        bkname = self.join('bookmarks')
        if os.path.exists(bkname):
            util.copyfile(bkname, self.join('journal.bookmarks'))
        else:
            self.opener.write('journal.bookmarks', '')

        return (self.sjoin('journal'), self.join('journal.dirstate'),
                self.join('journal.branch'), self.join('journal.desc'),
                self.join('journal.bookmarks'))
744 744
    def recover(self):
        """Roll back an interrupted transaction, if one exists.

        Returns True when a journal was replayed, False otherwise.
        """
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
759 759
    def rollback(self, dryrun=False, force=False):
        """Undo the last transaction recorded in the undo files.

        Returns 0 on success, 1 when no rollback information exists.
        Takes both wlock and lock since dirstate may be rewritten.
        """
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                return self._rollback(dryrun, force)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)
772 772
    def _rollback(self, dryrun, force):
        """Perform the actual rollback from the undo.* files.

        Refuses (without force) to roll back a commit when the working
        directory is not based on tip, since that could lose data.
        Returns 0; with dryrun=True only the message is printed.
        """
        ui = self.ui
        try:
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            # undo.desc missing: older journal format
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
        if os.path.exists(self.join('undo.bookmarks')):
            util.rename(self.join('undo.bookmarks'),
                        self.join('bookmarks'))
        self.invalidate()

        # restore the dirstate only if the working parents were stripped
        # by the rollback
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            util.rename(self.join('undo.dirstate'), self.join('dirstate'))
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(branch)
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            self.destroyed()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        return 0
832 832
833 833 def invalidatecaches(self):
834 834 try:
835 835 delattr(self, '_tagscache')
836 836 except AttributeError:
837 837 pass
838 838
839 839 self._branchcache = None # in UTF-8
840 840 self._branchcachetip = None
841 841
    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different to dirstate.invalidate() that it doesn't always
        rereads the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        try:
            # drop the filecache-backed property; absence is fine
            delattr(self, 'dirstate')
        except AttributeError:
            pass
855 855
856 856 def invalidate(self):
857 857 for k in self._filecache:
858 858 # dirstate is invalidated separately in invalidatedirstate()
859 859 if k == 'dirstate':
860 860 continue
861 861
862 862 try:
863 863 delattr(self, k)
864 864 except AttributeError:
865 865 pass
866 866 self.invalidatecaches()
867 867
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the named lock file.

        With wait=True, retries with a (configurable) 600 second timeout
        after warning about the current holder; otherwise LockHeld
        propagates.  acquirefn, if given, runs after acquisition.
        """
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
882 882
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            # re-enter the existing lock
            l.lock()
            return l

        def unlock():
            # flush the store and refresh filecache entries on release so
            # stale cached state is not reused after external changes
            self.store.write()
            for k, ce in self._filecache.items():
                if k == 'dirstate':
                    continue
                ce.refresh()

        l = self._lock(self.sjoin("lock"), wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
903 903
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            # re-enter the existing lock
            l.lock()
            return l

        def unlock():
            # persist the dirstate and refresh its filecache entry on
            # release
            self.dirstate.write()
            ce = self._filecache.get('dirstate')
            if ce:
                ce.refresh()

        l = self._lock(self.join("wlock"), wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
924 924
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    Returns the filenode to record in the manifest: a new filelog node
    when the file content changed (fname is then appended to
    changelist), or the first-parent filenode when only flags changed
    or nothing changed at all.
    """

    fname = fctx.path()
    text = fctx.data()
    flog = self.file(fname)
    # parent filenodes according to each manifest; fparent2o remembers
    # the original second parent for the flags-only check at the end
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = fparent2o = manifest2.get(fname, nullid)

    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file.  This copy data will effectively act as a parent
        # of this new revision.  If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent.  For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                      should record that bar descends from
        #                      bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4        as the merge base
        #

        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # find source in nearest ancestor if we've lost track
        if not crev:
            self.ui.debug(" %s: searching for copy revision for %s\n" %
                          (fname, cfname))
            for ancestor in self[None].ancestors():
                if cfname in ancestor:
                    crev = ancestor[cfname].filenode()
                    break

        if crev:
            # record the copy source in the revision metadata
            self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        else:
            self.ui.warn(_("warning: can't find ancestor for '%s' "
                           "copied from '%s'!\n") % (fname, cfname))

    elif fparent2 != nullid:
        # is one parent an ancestor of the other?  If so, collapse to a
        # single effective parent.
        fparentancestor = flog.ancestor(fparent1, fparent2)
        if fparentancestor == fparent1:
            fparent1, fparent2 = fparent2, nullid
        elif fparentancestor == fparent2:
            fparent2 = nullid

    # is the file changed?
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

    # are just the flags changed during merge?
    if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
1004 1004
def commit(self, text="", user=None, date=None, match=None, force=False,
           editor=False, extra={}):
    """Add a new revision to current repository.

    Revision information is gathered from the working directory,
    match can be used to filter the committed files. If editor is
    supplied, it is called to get a commit message.

    Returns the node of the new changeset, or None when there is
    nothing to commit.

    NOTE(review): ``extra={}`` is a shared mutable default; it is only
    read here, but confirm that no caller relies on mutating it.
    """

    def fail(f, msg):
        raise util.Abort('%s: %s' % (f, msg))

    if not match:
        match = matchmod.always(self.root, '')

    if not force:
        # track visited directories so explicit directory patterns can
        # be validated further below; abort on bad files
        vdirs = []
        match.dir = vdirs.append
        match.bad = fail

    wlock = self.wlock()
    try:
        wctx = self[None]
        merge = len(wctx.parents()) > 1

        if (not force and merge and match and
            (match.files() or match.anypats())):
            raise util.Abort(_('cannot partially commit a merge '
                               '(do not specify files or patterns)'))

        # changes is a status tuple; indexes used below:
        # 0 modified, 1 added, 2 removed, 3 missing, 6 clean
        changes = self.status(match=match, clean=force)
        if force:
            changes[0].extend(changes[6]) # mq may commit unchanged files

        # check subrepos
        subs = []
        removedsubs = set()
        if '.hgsub' in wctx:
            # only manage subrepos and .hgsubstate if .hgsub is present
            for p in wctx.parents():
                removedsubs.update(s for s in p.substate if match(s))
            for s in wctx.substate:
                removedsubs.discard(s)
                if match(s) and wctx.sub(s).dirty():
                    subs.append(s)
        if (subs or removedsubs):
            if (not match('.hgsub') and
                '.hgsub' in (wctx.modified() + wctx.added())):
                raise util.Abort(
                    _("can't commit subrepos without .hgsub"))
            # keep .hgsubstate in sync with the subrepo commit below
            if '.hgsubstate' not in changes[0]:
                changes[0].insert(0, '.hgsubstate')
            if '.hgsubstate' in changes[2]:
                changes[2].remove('.hgsubstate')
        elif '.hgsub' in changes[2]:
            # clean up .hgsubstate when .hgsub is removed
            if ('.hgsubstate' in wctx and
                '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                changes[2].insert(0, '.hgsubstate')

        if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
            changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
            if changedsubs:
                raise util.Abort(_("uncommitted changes in subrepo %s")
                                 % changedsubs[0],
                                 hint=_("use --subrepos for recursive commit"))

        # make sure all explicit patterns are matched
        if not force and match.files():
            matched = set(changes[0] + changes[1] + changes[2])

            for f in match.files():
                if f == '.' or f in matched or f in wctx.substate:
                    continue
                if f in changes[3]: # missing
                    fail(f, _('file not found!'))
                if f in vdirs: # visited directory
                    d = f + '/'
                    for mf in matched:
                        if mf.startswith(d):
                            break
                    else:
                        fail(f, _("no match under directory!"))
                elif f not in self.dirstate:
                    fail(f, _("file not tracked!"))

        # bail out early when there is nothing to commit (unless forced,
        # closing a branch head, merging, or changing branch)
        if (not force and not extra.get("close") and not merge
            and not (changes[0] or changes[1] or changes[2])
            and wctx.branch() == wctx.p1().branch()):
            return None

        ms = mergemod.mergestate(self)
        for f in changes[0]:
            if f in ms and ms[f] == 'u':
                raise util.Abort(_("unresolved merge conflicts "
                                   "(see hg help resolve)"))

        cctx = context.workingctx(self, text, user, date, extra, changes)
        if editor:
            cctx._text = editor(self, cctx, subs)
        edited = (text != cctx._text)

        # commit subs
        if subs or removedsubs:
            state = wctx.substate.copy()
            for s in sorted(subs):
                sub = wctx.sub(s)
                self.ui.status(_('committing subrepository %s\n') %
                    subrepo.subrelpath(sub))
                sr = sub.commit(cctx._text, user, date)
                state[s] = (state[s][0], sr)
            subrepo.writestate(self, state)

        # Save commit message in case this transaction gets rolled back
        # (e.g. by a pretxncommit hook).  Leave the content alone on
        # the assumption that the user will use the same editor again.
        msgfn = self.savecommitmessage(cctx._text)

        p1, p2 = self.dirstate.parents()
        hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
        try:
            self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
            ret = self.commitctx(cctx, True)
        except:
            # bare except on purpose: point at the saved message for any
            # failure (including KeyboardInterrupt), then re-raise
            if edited:
                self.ui.write(
                    _('note: commit message saved in %s\n') % msgfn)
            raise

        # update bookmarks, dirstate and mergestate
        bookmarks.update(self, p1, ret)
        for f in changes[0] + changes[1]:
            self.dirstate.normal(f)
        for f in changes[2]:
            self.dirstate.drop(f)
        self.dirstate.setparents(ret)
        ms.reset()
    finally:
        wlock.release()

    self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
    return ret
1147 1147
def commitctx(self, ctx, error=False):
    """Add a new revision to current repository.
    Revision information is passed via the context argument.

    Returns the node of the new changeset.  When error is True, any
    IOError while committing a file is fatal; otherwise a missing
    file (ENOENT) is treated as removed.
    """

    tr = lock = None
    removed = list(ctx.removed())
    p1, p2 = ctx.p1(), ctx.p2()
    user = ctx.user()

    lock = self.lock()
    try:
        tr = self.transaction("commit")
        # hand revlog code a weak proxy so it cannot keep the
        # transaction alive
        trp = weakref.proxy(tr)

        if ctx.files():
            m1 = p1.manifest().copy()
            m2 = p2.manifest()

            # check in files
            new = {}          # filename -> new filenode
            changed = []      # filenames actually changed
            linkrev = len(self)   # rev number the new changeset will get
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except OSError, inst:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                except IOError, inst:
                    errcode = getattr(inst, 'errno', errno.ENOENT)
                    if error or errcode and errcode != errno.ENOENT:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # vanished file: record it as removed instead
                        removed.append(f)

            # update manifest
            m1.update(new)
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))
            files = changed + removed
        else:
            # no file changes: reuse the first parent's manifest
            mn = p1.manifestnode()
            files = []

        # update changelog
        self.changelog.delayupdate()
        n = self.changelog.add(mn, files, ctx.description(),
                               trp, p1.node(), p2.node(),
                               user, ctx.date(), ctx.extra().copy())
        # lets hooks see the pending changelog before it is finalized
        p = lambda: self.changelog.writepending() and self.root or ""
        xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2, pending=p)
        self.changelog.finalize(trp)
        tr.close()

        if self._branchcache:
            self.updatebranchcache()
        return n
    finally:
        if tr:
            tr.release()
        lock.release()
1221 1221
def destroyed(self):
    '''Inform the repository that nodes have been destroyed.

    Strip and rollback call this so that post-destruction cleanup
    lives in one common place.'''
    # XXX accepting the list of destroyed nodes would allow smarter
    # updates, but rollback() has no easy way to provide one.
    #
    # Refresh the caches now: the persistent tag cache then only has
    # to worry about heads destroyed immediately before this call,
    # which keeps "cachetip == currenttip" (comparing both rev and
    # node) meaning that nothing was added or destroyed.
    #
    # XXX this is suboptimal when qrefresh'ing (strip the current
    # head, refresh the tag cache, then immediately add a new head),
    # but it is needed for the "instant tag cache retrieval" case.
    self.invalidatecaches()
1240 1240
def walk(self, match, node=None):
    '''
    Walk recursively through the directory tree (node=None) or the
    given changeset, finding all files matched by the match function.
    '''
    ctx = self[node]
    return ctx.walk(match)
1248 1248
def status(self, node1='.', node2=None, match=None,
           ignored=False, clean=False, unknown=False,
           listsubrepos=False):
    """return status of files between two nodes or node and working directory

    If node1 is None, use the first dirstate parent instead.
    If node2 is None, compare node1 with working directory.

    Returns a tuple of seven sorted lists:
    (modified, added, removed, deleted, unknown, ignored, clean).
    The last four are only populated when the corresponding listing
    flag is set (or when comparing against the working directory).
    """

    def mfmatches(ctx):
        # manifest of ctx restricted to files accepted by match
        mf = ctx.manifest().copy()
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    if isinstance(node1, context.changectx):
        ctx1 = node1
    else:
        ctx1 = self[node1]
    if isinstance(node2, context.changectx):
        ctx2 = node2
    else:
        ctx2 = self[node2]

    working = ctx2.rev() is None
    parentworking = working and ctx1 == self['.']
    match = match or matchmod.always(self.root, self.getcwd())
    listignored, listclean, listunknown = ignored, clean, unknown

    # load earliest manifest first for caching reasons
    if not working and ctx2.rev() < ctx1.rev():
        ctx2.manifest()

    if not parentworking:
        def bad(f, msg):
            # only warn about files known in ctx1; others are reported
            # through the normal status mechanism
            if f not in ctx1:
                self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
        match.bad = bad

    if working: # we need to scan the working dir
        subrepos = []
        if '.hgsub' in self.dirstate:
            subrepos = ctx2.substate.keys()
        s = self.dirstate.status(match, subrepos, listignored,
                                 listclean, listunknown)
        # NOTE: 'cmp' (files needing a content compare) shadows the
        # builtin of the same name within this method
        cmp, modified, added, removed, deleted, unknown, ignored, clean = s

        # check for any possibly clean files
        if parentworking and cmp:
            fixup = []
            # do a full compare of any files that might have changed
            for f in sorted(cmp):
                if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                    or ctx1[f].cmp(ctx2[f])):
                    modified.append(f)
                else:
                    fixup.append(f)

            # update dirstate for files that are actually clean
            if fixup:
                if listclean:
                    clean += fixup

                try:
                    # updating the dirstate is optional
                    # so we don't wait on the lock
                    wlock = self.wlock(False)
                    try:
                        for f in fixup:
                            self.dirstate.normal(f)
                    finally:
                        wlock.release()
                except error.LockError:
                    pass

    if not parentworking:
        mf1 = mfmatches(ctx1)
        if working:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            mf2 = mfmatches(self['.'])
            for f in cmp + modified + added:
                mf2[f] = None
                mf2.set(f, ctx2.flags(f))
            for f in removed:
                if f in mf2:
                    del mf2[f]
        else:
            # we are comparing two revisions
            deleted, unknown, ignored = [], [], []
            mf2 = mfmatches(ctx2)

        # classify every file of mf2 against mf1; whatever remains in
        # mf1 afterwards was removed
        modified, added, clean = [], [], []
        for fn in mf2:
            if fn in mf1:
                if (fn not in deleted and
                    (mf1.flags(fn) != mf2.flags(fn) or
                     (mf1[fn] != mf2[fn] and
                      (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                    modified.append(fn)
                elif listclean:
                    clean.append(fn)
                del mf1[fn]
            elif fn not in deleted:
                added.append(fn)
        removed = mf1.keys()

    if working and modified and not self.dirstate._checklink:
        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in modified:
            if ctx2.flags(f) == 'l':
                d = ctx2[f].data()
                if len(d) >= 1024 or '\n' in d or util.binary(d):
                    self.ui.debug('ignoring suspect symlink placeholder'
                                  ' "%s"\n' % f)
                    continue
            sane.append(f)
        modified = sane

    r = modified, added, removed, deleted, unknown, ignored, clean

    if listsubrepos:
        # merge in the status of each subrepository, prefixing paths
        for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
            if working:
                rev2 = None
            else:
                rev2 = ctx2.substate[subpath][1]
            try:
                submatch = matchmod.narrowmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % subpath)

    for l in r:
        l.sort()
    return r
1395 1395
def heads(self, start=None):
    '''Return the changelog heads (optionally restricted to descendants
    of start), ordered from highest to lowest revision number.'''
    torev = self.changelog.rev
    return sorted(self.changelog.heads(start), key=torev, reverse=True)
1400 1400
def branchheads(self, branch=None, start=None, closed=False):
    '''return a (possibly filtered) list of heads for the given branch

    Heads are returned in topological order, from newest to oldest.
    If branch is None, use the dirstate branch.
    If start is not None, return only heads reachable from start.
    If closed is True, return heads that are marked as closed as well.
    '''
    if branch is None:
        branch = self[None].branch()
    bmap = self.branchmap()
    if branch not in bmap:
        return []
    # the branch cache stores heads ordered lowest to highest; flip
    # them so the newest comes first
    heads = bmap[branch][::-1]
    if start is not None:
        # drop heads that cannot be reached from start
        reachable = set(self.changelog.nodesbetween([start], heads)[2])
        heads = [h for h in heads if h in reachable]
    if not closed:
        heads = [h for h in heads
                 if 'close' not in self.changelog.read(h)[5]]
    return heads
1424 1424
def branches(self, nodes):
    '''For each starting node, follow first parents until a merge or a
    root is reached and return (start, end, p1, p2) tuples.  An empty
    nodes list means: start from the changelog tip.'''
    if not nodes:
        nodes = [self.changelog.tip()]
    result = []
    for start in nodes:
        n = start
        while True:
            ps = self.changelog.parents(n)
            # stop at a merge (two real parents) or at a root
            if ps[1] != nullid or ps[0] == nullid:
                result.append((start, n, ps[0], ps[1]))
                break
            n = ps[0]
    return result
1438 1438
def between(self, pairs):
    '''For each (top, bottom) pair, walk first parents from top toward
    bottom and collect the nodes seen at exponentially spaced steps
    (1, 2, 4, ...).  Returns one list per pair.'''
    result = []

    for top, bottom in pairs:
        picked = []
        n, step, nextpick = top, 0, 1

        while n != bottom and n != nullid:
            parent = self.changelog.parents(n)[0]
            if step == nextpick:
                picked.append(n)
                nextpick *= 2
            n = parent
            step += 1

        result.append(picked)

    return result
1457 1457
def pull(self, remote, heads=None, force=False):
    '''Pull missing changesets from remote into this repository.

    Returns 0 when nothing was fetched; otherwise the head-count
    summary returned by addchangegroup().'''
    lock = self.lock()
    try:
        discovered = discovery.findcommonincoming(self, remote,
                                                  heads=heads, force=force)
        common, fetch, rheads = discovered
        if not fetch:
            self.ui.status(_("no changes found\n"))
            result = 0
        else:
            if heads is None:
                if list(common) == [nullid]:
                    self.ui.status(_("requesting all changes\n"))
                elif remote.capable('changegroupsubset'):
                    # issue1320, avoid a race if remote changed after
                    # discovery
                    heads = rheads

            # pick the richest transfer method the remote supports
            if remote.capable('getbundle'):
                cg = remote.getbundle('pull', common=common,
                                      heads=heads or rheads)
            elif heads is None:
                cg = remote.changegroup(fetch, 'pull')
            elif not remote.capable('changegroupsubset'):
                raise util.Abort(_("partial pull cannot be done because "
                                   "other repository doesn't support "
                                   "changegroupsubset."))
            else:
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            result = self.addchangegroup(cg, 'pull', remote.url(),
                                         lock=lock)
    finally:
        lock.release()

    return result
1491 1491
def checkpush(self, force, revs):
    """Hook point for extensions: run additional checks before pushing.

    The default implementation performs no checks.  Extensions that
    override the push command should either override this function or
    call it before pushing.
    """
    pass
1498 1498
def push(self, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from the current
    repository to remote. Return an integer:
      - 0 means HTTP error *or* nothing to push
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    self.checkpush(force, revs)
    lock = None
    unbundle = remote.capable('unbundle')
    if not unbundle:
        # no unbundle support: we must lock the remote repo ourselves
        lock = remote.lock()
    try:
        cg, remote_heads = discovery.prepush(self, remote, force, revs,
                                             newbranch)
        ret = remote_heads
        if cg is not None:
            if unbundle:
                # local repo finds heads on server, finds out what
                # revs it must push. once revs transferred, if server
                # finds it has different heads (someone else won
                # commit/push race), server aborts.
                if force:
                    remote_heads = ['force']
                # ssh: return remote's addchangegroup()
                # http: return remote's addchangegroup() or 0 for error
                ret = remote.unbundle(cg, remote_heads, 'push')
            else:
                # we return an integer indicating remote head count change
                ret = remote.addchangegroup(cg, 'push', self.url(),
                                            lock=lock)
    finally:
        if lock is not None:
            lock.release()

    # sync bookmarks: advance every remote bookmark we also carry when
    # our local node descends from the remote's (a fast-forward)
    self.ui.debug("checking for updated bookmarks\n")
    rb = remote.listkeys('bookmarks')
    for k in rb.keys():
        if k in self._bookmarks:
            nr, nl = rb[k], hex(self._bookmarks[k])
            if nr in self:
                cr = self[nr]
                cl = self[nl]
                if cl in cr.descendants():
                    r = remote.pushkey('bookmarks', k, nr, nl)
                    if r:
                        self.ui.status(_("updating bookmark %s\n") % k)
                    else:
                        self.ui.warn(_('updating bookmark %s'
                                       ' failed!\n') % k)

    return ret
1560 1560
def changegroupinfo(self, nodes, source):
    '''Report how many changesets are being bundled; in debug mode,
    also list every node.'''
    if self.ui.verbose or source == 'bundle':
        self.ui.status(_("%d changesets found\n") % len(nodes))
    if self.ui.debugflag:
        self.ui.debug("list of changesets:\n")
        for n in nodes:
            self.ui.debug("%s\n" % hex(n))
1568 1568
def changegroupsubset(self, bases, heads, source):
    """Compute a changegroup consisting of all the nodes that are
    descendants of any of the bases and ancestors of any of the heads.
    Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.
    """
    chlog = self.changelog
    bases = bases or [nullid]
    csets, bases, heads = chlog.nodesbetween(bases, heads)
    baserevs = [chlog.rev(n) for n in bases]
    # every ancestor of a base is assumed to be known already
    knownrevs = set(chlog.ancestors(*baserevs))
    return self._changegroupsubset(knownrevs, csets, heads, source)
1589 1589
def getbundle(self, source, heads=None, common=None):
    """Like changegroupsubset, but bundle the set difference between
    the ancestors of heads and the ancestors of common.

    If heads is None, use the local heads. If common is None, use
    [nullid].  Nodes in common that are unknown locally are silently
    dropped (the current discovery protocol may hand us such nodes).

    Returns None when there is nothing to bundle.
    """
    cl = self.changelog
    if not common:
        known = [nullid]
    else:
        nodemap = cl.nodemap
        known = [n for n in common if n in nodemap]
    if not heads:
        heads = cl.heads()
    known, missing = cl.findcommonmissing(known, heads)
    if missing:
        return self._changegroupsubset(known, missing, heads, source)
    return None
1611 1611
def _changegroupsubset(self, commonrevs, csets, heads, source):
    """Build a changegroup for csets, skipping anything whose linkrev
    is in commonrevs (already known to the recipient).

    Returns an unbundle10 object whose read() yields the chunks.
    """

    cl = self.changelog
    mf = self.manifest
    mfs = {} # needed manifests
    fnodes = {} # needed file nodes
    changedfiles = set()
    # mutable cells shared with the closures below:
    fstate = ['', {}]   # current filename and its node->changenode map
    count = [0]         # progress counter

    # can we go through the fast path ?
    heads.sort()
    if heads == sorted(self.heads()):
        return self._changegroup(csets, source)

    # slow path
    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(csets, source)

    # filter any nodes that claim to be part of the known set
    def prune(revlog, missing):
        return [n for n in missing
                if revlog.linkrev(revlog.rev(n)) not in commonrevs]

    def lookup(revlog, x):
        # map node x of revlog to its owning changeset node, collecting
        # needed manifests/filenodes and reporting progress on the way
        if revlog == cl:
            c = cl.read(x)
            changedfiles.update(c[3])
            mfs.setdefault(c[0], x)
            count[0] += 1
            self.ui.progress(_('bundling'), count[0],
                             unit=_('changesets'), total=len(csets))
            return x
        elif revlog == mf:
            clnode = mfs[x]
            mdata = mf.readfast(x)
            for f in changedfiles:
                if f in mdata:
                    fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
            count[0] += 1
            self.ui.progress(_('bundling'), count[0],
                             unit=_('manifests'), total=len(mfs))
            return mfs[x]
        else:
            self.ui.progress(
                _('bundling'), count[0], item=fstate[0],
                unit=_('files'), total=len(changedfiles))
            return fstate[1][x]

    bundler = changegroup.bundle10(lookup)
    reorder = self.ui.config('bundle', 'reorder', 'auto')
    if reorder == 'auto':
        reorder = None
    else:
        reorder = util.parsebool(reorder)

    def gengroup():
        # Create a changenode group generator that will call our functions
        # back to lookup the owning changenode and collect information.
        for chunk in cl.group(csets, bundler, reorder=reorder):
            yield chunk
        self.ui.progress(_('bundling'), None)

        # Create a generator for the manifestnodes that calls our lookup
        # and data collection functions back.
        count[0] = 0
        for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
            yield chunk
        self.ui.progress(_('bundling'), None)

        mfs.clear()

        # Go through all our files in order sorted by name.
        count[0] = 0
        for fname in sorted(changedfiles):
            filerevlog = self.file(fname)
            if not len(filerevlog):
                raise util.Abort(_("empty or missing revlog for %s") % fname)
            fstate[0] = fname
            fstate[1] = fnodes.pop(fname, {})

            nodelist = prune(filerevlog, fstate[1])
            if nodelist:
                count[0] += 1
                yield bundler.fileheader(fname)
                for chunk in filerevlog.group(nodelist, bundler, reorder):
                    yield chunk

        # Signal that no more groups are left.
        yield bundler.close()
        self.ui.progress(_('bundling'), None)

    if csets:
        self.hook('outgoing', node=hex(csets[0]), source=source)

    return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1708 1708
def changegroup(self, basenodes, source):
    '''Bundle everything descending from basenodes up to all local
    heads.  Going through changegroupsubset() avoids a race with
    concurrent pushes (issue1320).'''
    return self.changegroupsubset(basenodes, self.heads(), source)
1712 1712
def _changegroup(self, nodes, source):
    """Compute the changegroup of all nodes that we have that a recipient
    doesn't. Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    This is much easier than the previous function as we can assume that
    the recipient has any changenode we aren't sending them.

    nodes is the set of nodes to send"""

    cl = self.changelog
    mf = self.manifest
    mfs = {}              # manifest node -> linked changeset node
    changedfiles = set()
    # mutable cells shared with the closures below:
    fstate = ['']         # filename currently being bundled
    count = [0]           # progress counter

    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(nodes, source)

    revset = set([cl.rev(n) for n in nodes])

    def gennodelst(log):
        # all nodes of log whose linkrev is among the outgoing revisions
        return [log.node(r) for r in log if log.linkrev(r) in revset]

    def lookup(revlog, x):
        # map node x of revlog to its owning changeset node, reporting
        # progress along the way
        if revlog == cl:
            c = cl.read(x)
            changedfiles.update(c[3])
            mfs.setdefault(c[0], x)
            count[0] += 1
            self.ui.progress(_('bundling'), count[0],
                             unit=_('changesets'), total=len(nodes))
            return x
        elif revlog == mf:
            count[0] += 1
            self.ui.progress(_('bundling'), count[0],
                             unit=_('manifests'), total=len(mfs))
            return cl.node(revlog.linkrev(revlog.rev(x)))
        else:
            self.ui.progress(
                _('bundling'), count[0], item=fstate[0],
                total=len(changedfiles), unit=_('files'))
            return cl.node(revlog.linkrev(revlog.rev(x)))

    bundler = changegroup.bundle10(lookup)
    reorder = self.ui.config('bundle', 'reorder', 'auto')
    if reorder == 'auto':
        reorder = None
    else:
        reorder = util.parsebool(reorder)

    def gengroup():
        '''yield a sequence of changegroup chunks (strings)'''
        # construct a list of all changed files

        for chunk in cl.group(nodes, bundler, reorder=reorder):
            yield chunk
        self.ui.progress(_('bundling'), None)

        count[0] = 0
        for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
            yield chunk
        self.ui.progress(_('bundling'), None)

        count[0] = 0
        for fname in sorted(changedfiles):
            filerevlog = self.file(fname)
            if not len(filerevlog):
                raise util.Abort(_("empty or missing revlog for %s") % fname)
            fstate[0] = fname
            nodelist = gennodelst(filerevlog)
            if nodelist:
                count[0] += 1
                yield bundler.fileheader(fname)
                for chunk in filerevlog.group(nodelist, bundler, reorder):
                    yield chunk
        yield bundler.close()
        self.ui.progress(_('bundling'), None)

    if nodes:
        self.hook('outgoing', node=hex(nodes[0]), source=source)

    return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1797 1797
    def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.
        If lock is not None, the function takes ownership of the lock
        and releases it after the changegroup is added.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        # csmap/revmap translate incoming nodes to local revision numbers
        # for the revlog addgroup() calls below.
        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        # 'prechangegroup' may veto the whole operation (throw=True).
        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0
        efiles = set()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = cl.heads()

        tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
        try:
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            clstart = len(cl)
            # Small progress callback invoked by the changegroup source for
            # each chunk it reads; step/count/total are mutated per phase.
            class prog(object):
                step = _('changesets')
                count = 1
                ui = self.ui
                total = None
                def __call__(self):
                    self.ui.progress(self.step, self.count, unit=_('chunks'),
                                     total=self.total)
                    self.count += 1
            pr = prog()
            source.callback = pr

            source.changelogheader()
            if (cl.addgroup(source, csmap, trp) is None
                and not emptyok):
                raise util.Abort(_("received changelog group is empty"))
            clend = len(cl)
            changesets = clend - clstart
            for c in xrange(clstart, clend):
                efiles.update(self[c].files())
            # efiles is re-bound from a set of names to its size: the
            # expected number of filelogs, used as the progress total.
            efiles = len(efiles)
            self.ui.progress(_('changesets'), None)

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            pr.step = _('manifests')
            pr.count = 1
            pr.total = changesets # manifests <= changesets
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            source.manifestheader()
            self.manifest.addgroup(source, revmap, trp)
            self.ui.progress(_('manifests'), None)

            needfiles = {}
            if self.ui.configbool('server', 'validate', default=False):
                # validate incoming csets have their manifests
                for cset in xrange(clstart, clend):
                    mfest = self.changelog.read(self.changelog.node(cset))[0]
                    mfest = self.manifest.readdelta(mfest)
                    # store file nodes we must see
                    for f, n in mfest.iteritems():
                        needfiles.setdefault(f, set()).add(n)

            # process the files
            self.ui.status(_("adding file changes\n"))
            pr.step = _('files')
            pr.count = 1
            pr.total = efiles
            source.callback = None

            while True:
                chunkdata = source.filelogheader()
                if not chunkdata:
                    break
                f = chunkdata["filename"]
                self.ui.debug("adding %s revisions\n" % f)
                pr()
                fl = self.file(f)
                o = len(fl)
                if fl.addgroup(source, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += len(fl) - o
                files += 1
                # tick off the file nodes the validation pass demanded
                if f in needfiles:
                    needs = needfiles[f]
                    for new in xrange(o, len(fl)):
                        n = fl.node(new)
                        if n in needs:
                            needs.remove(n)
                    if not needs:
                        del needfiles[f]
            self.ui.progress(_('files'), None)

            # anything left in needfiles was promised by a manifest but
            # never delivered — abort rather than store a broken repo
            for f, needs in needfiles.iteritems():
                fl = self.file(f)
                for n in needs:
                    try:
                        fl.rev(n)
                    except error.LookupError:
                        raise util.Abort(
                            _('missing file data for %s:%s - run hg verify') %
                            (f, hex(n)))

            # dh = net change in head count; heads carrying a 'close'
            # marker in their extra dict do not count as added heads
            dh = 0
            if oldheads:
                heads = cl.heads()
                dh = len(heads) - len(oldheads)
                for h in heads:
                    if h not in oldheads and 'close' in self[h].extra():
                        dh -= 1
            htext = ""
            if dh:
                htext = _(" (%+d heads)") % dh

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, htext))

            if changesets > 0:
                # hooks get the pending-data root only once writepending()
                # reports there is something to publish
                p = lambda: cl.writepending() and self.root or ""
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(cl.node(clstart)), source=srctype,
                          url=url, pending=p)

            # make changelog see real files again
            cl.finalize(trp)

            tr.close()
        finally:
            tr.release()
            if lock:
                lock.release()

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug("updating the branch cache\n")
            self.updatebranchcache()
            self.hook("changegroup", node=hex(cl.node(clstart)),
                      source=srctype, url=url)

            for i in xrange(clstart, clend):
                self.hook("incoming", node=hex(cl.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if dh < 0:
            return dh - 1
        else:
            return dh + 1
1970 1970
    def stream_in(self, remote, requirements):
        """Receive an uncompressed streaming clone from *remote*.

        Wire format, as parsed below: one status line (0 = ok,
        1 = operation forbidden, 2 = remote locking failed), then a
        "<total_files> <total_bytes>" line, then for each file a
        "<name>\\0<size>" header followed by exactly <size> bytes of
        store data. Returns len(self.heads()) + 1 (never 0).
        """
        lock = self.lock()
        try:
            fp = remote.stream_out()
            l = fp.readline()
            try:
                resp = int(l)
            except ValueError:
                raise error.ResponseError(
                    _('Unexpected response from remote server:'), l)
            if resp == 1:
                raise util.Abort(_('operation forbidden by server'))
            elif resp == 2:
                raise util.Abort(_('locking the remote repository failed'))
            elif resp != 0:
                raise util.Abort(_('the server sent an unknown error code'))
            self.ui.status(_('streaming all changes\n'))
            l = fp.readline()
            try:
                total_files, total_bytes = map(int, l.split(' ', 1))
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('Unexpected response from remote server:'), l)
            self.ui.status(_('%d files to transfer, %s of data\n') %
                           (total_files, util.bytecount(total_bytes)))
            start = time.time()
            for i in xrange(total_files):
                # XXX doesn't support '\n' or '\r' in filenames
                l = fp.readline()
                try:
                    name, size = l.split('\0', 1)
                    size = int(size)
                except (ValueError, TypeError):
                    raise error.ResponseError(
                        _('Unexpected response from remote server:'), l)
                self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
                # for backwards compat, name was partially encoded
                ofp = self.sopener(store.decodedir(name), 'w')
                for chunk in util.filechunkiter(fp, limit=size):
                    ofp.write(chunk)
                ofp.close()
            elapsed = time.time() - start
            # avoid a divide-by-zero in the rate report on very fast clones
            if elapsed <= 0:
                elapsed = 0.001
            self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                           (util.bytecount(total_bytes), elapsed,
                            util.bytecount(total_bytes / elapsed)))

            # new requirements = old non-format requirements + new format-related
            # requirements from the streamed-in repository
            requirements.update(set(self.requirements) - self.supportedformats)
            self._applyrequirements(requirements)
            self._writerequirements()

            # drop all cached state: the store was rewritten underneath us
            self.invalidate()
            return len(self.heads()) + 1
        finally:
            lock.release()
2029 2029
2030 2030 def clone(self, remote, heads=[], stream=False):
2031 2031 '''clone remote repository.
2032 2032
2033 2033 keyword arguments:
2034 2034 heads: list of revs to clone (forces use of pull)
2035 2035 stream: use streaming clone if possible'''
2036 2036
2037 2037 # now, all clients that can request uncompressed clones can
2038 2038 # read repo formats supported by all servers that can serve
2039 2039 # them.
2040 2040
2041 2041 # if revlog format changes, client will have to check version
2042 2042 # and format flags on "stream" capability, and use
2043 2043 # uncompressed only if compatible.
2044 2044
2045 2045 if stream and not heads:
2046 2046 # 'stream' means remote revlog format is revlogv1 only
2047 2047 if remote.capable('stream'):
2048 2048 return self.stream_in(remote, set(('revlogv1',)))
2049 2049 # otherwise, 'streamreqs' contains the remote revlog format
2050 2050 streamreqs = remote.capable('streamreqs')
2051 2051 if streamreqs:
2052 2052 streamreqs = set(streamreqs.split(','))
2053 2053 # if we support it, stream in and adjust our requirements
2054 2054 if not streamreqs - self.supportedformats:
2055 2055 return self.stream_in(remote, streamreqs)
2056 2056 return self.pull(remote, heads)
2057 2057
2058 2058 def pushkey(self, namespace, key, old, new):
2059 2059 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2060 2060 old=old, new=new)
2061 2061 ret = pushkey.push(self, namespace, key, old, new)
2062 2062 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2063 2063 ret=ret)
2064 2064 return ret
2065 2065
2066 2066 def listkeys(self, namespace):
2067 2067 self.hook('prelistkeys', throw=True, namespace=namespace)
2068 2068 values = pushkey.list(self, namespace)
2069 2069 self.hook('listkeys', namespace=namespace, values=values)
2070 2070 return values
2071 2071
2072 2072 def debugwireargs(self, one, two, three=None, four=None, five=None):
2073 2073 '''used to test argument passing over the wire'''
2074 2074 return "%s %s %s %s %s" % (one, two, three, four, five)
2075 2075
2076 2076 def savecommitmessage(self, text):
2077 2077 fp = self.opener('last-message.txt', 'wb')
2078 2078 try:
2079 2079 fp.write(text)
2080 2080 finally:
2081 2081 fp.close()
2082 2082 return self.pathto(fp.name[len(self.root)+1:])
2083 2083
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in *files*."""
    # Snapshot the pairs as tuples so later mutation of *files* cannot
    # change the pending renames.
    pending = [tuple(pair) for pair in files]
    def runrenames():
        for sourcepath, targetpath in pending:
            util.rename(sourcepath, targetpath)
    return runrenames
2091 2091
def undoname(fn):
    """Map a journal file path to the corresponding undo file path."""
    directory, basename = os.path.split(fn)
    assert basename.startswith('journal')
    # Only the leading 'journal' is rewritten, so e.g.
    # 'journal.dirstate' becomes 'undo.dirstate'.
    renamed = basename.replace('journal', 'undo', 1)
    return os.path.join(directory, renamed)
2096 2096
def instance(ui, path, create):
    """Repository factory: build a localrepository for *path*."""
    localpath = util.urllocalpath(path)
    return localrepository(ui, localpath, create)
2099 2099
def islocal(path):
    """Report that this repository type is always local; *path* is unused."""
    return True
@@ -1,453 +1,453
1 1 # Copyright (C) 2004, 2005 Canonical Ltd
2 2 #
3 3 # This program is free software; you can redistribute it and/or modify
4 4 # it under the terms of the GNU General Public License as published by
5 5 # the Free Software Foundation; either version 2 of the License, or
6 6 # (at your option) any later version.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU General Public License
14 14 # along with this program; if not, write to the Free Software
15 15 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
16 16
17 17 # mbp: "you know that thing where cvs gives you conflict markers?"
18 18 # s: "i hate that."
19 19
20 20 from i18n import _
21 21 import scmutil, util, mdiff
22 22 import sys, os
23 23
class CantReprocessAndShowBase(Exception):
    """Raised when a merge requests both reprocessing and a base marker."""
26 26
def intersect(ra, rb):
    """Given two ranges return the range where they intersect or None.

    >>> intersect((0, 10), (0, 6))
    (0, 6)
    >>> intersect((0, 10), (5, 15))
    (5, 10)
    >>> intersect((0, 10), (10, 15))
    >>> intersect((0, 9), (10, 15))
    >>> intersect((0, 9), (7, 15))
    (7, 9)
    """
    assert ra[0] <= ra[1]
    assert rb[0] <= rb[1]

    lo = max(ra[0], rb[0])
    hi = min(ra[1], rb[1])
    # A merely-touching overlap (lo == hi) does not count.
    if lo >= hi:
        return None
    return lo, hi
48 48
def compare_range(a, astart, aend, b, bstart, bend):
    """Compare a[astart:aend] == b[bstart:bend], without slicing.

    True when both half-open ranges have the same length and are
    element-wise equal.
    """
    # Different lengths can never match.
    if (aend - astart) != (bend - bstart):
        return False
    # Walk both ranges in lockstep.  The original used a py2-only
    # xrange pair plus a for/else whose else always ran (no break in
    # the loop); a single offset loop is equivalent and clearer.
    for offset in range(aend - astart):
        if a[astart + offset] != b[bstart + offset]:
            return False
    return True
59 59
class Merge3Text(object):
    """3-way merge of texts.

    Given strings BASE, OTHER, THIS, tries to produce a combined text
    incorporating the changes from both BASE->OTHER and BASE->THIS."""
    def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
        # Callers may pass pre-split line lists to avoid splitting twice.
        self.basetext = basetext
        self.atext = atext
        self.btext = btext
        if base is None:
            base = mdiff.splitnewlines(basetext)
        if a is None:
            a = mdiff.splitnewlines(atext)
        if b is None:
            b = mdiff.splitnewlines(btext)
        self.base = base
        self.a = a
        self.b = b

    def merge_lines(self,
                    name_a=None,
                    name_b=None,
                    name_base=None,
                    start_marker='<<<<<<<',
                    mid_marker='=======',
                    end_marker='>>>>>>>',
                    base_marker=None,
                    reprocess=False):
        """Return merge in cvs-like form.

        Yields merged lines one at a time; sets self.conflicts to True
        if any conflict region was emitted.
        """
        self.conflicts = False
        # Match the newline style of the first line of `a` for the
        # conflict-marker lines we insert.
        newline = '\n'
        if len(self.a) > 0:
            if self.a[0].endswith('\r\n'):
                newline = '\r\n'
            elif self.a[0].endswith('\r'):
                newline = '\r'
        if base_marker and reprocess:
            raise CantReprocessAndShowBase()
        if name_a:
            start_marker = start_marker + ' ' + name_a
        if name_b:
            end_marker = end_marker + ' ' + name_b
        if name_base and base_marker:
            base_marker = base_marker + ' ' + name_base
        merge_regions = self.merge_regions()
        if reprocess is True:
            merge_regions = self.reprocess_merge_regions(merge_regions)
        for t in merge_regions:
            what = t[0]
            if what == 'unchanged':
                for i in range(t[1], t[2]):
                    yield self.base[i]
            elif what == 'a' or what == 'same':
                for i in range(t[1], t[2]):
                    yield self.a[i]
            elif what == 'b':
                for i in range(t[1], t[2]):
                    yield self.b[i]
            elif what == 'conflict':
                self.conflicts = True
                yield start_marker + newline
                for i in range(t[3], t[4]):
                    yield self.a[i]
                if base_marker is not None:
                    yield base_marker + newline
                    for i in range(t[1], t[2]):
                        yield self.base[i]
                yield mid_marker + newline
                for i in range(t[5], t[6]):
                    yield self.b[i]
                yield end_marker + newline
            else:
                raise ValueError(what)

    def merge_annotated(self):
        """Return merge with conflicts, showing origin of lines.

        Most useful for debugging merge.
        """
        for t in self.merge_regions():
            what = t[0]
            if what == 'unchanged':
                for i in range(t[1], t[2]):
                    yield 'u | ' + self.base[i]
            elif what == 'a' or what == 'same':
                for i in range(t[1], t[2]):
                    yield what[0] + ' | ' + self.a[i]
            elif what == 'b':
                for i in range(t[1], t[2]):
                    yield 'b | ' + self.b[i]
            elif what == 'conflict':
                yield '<<<<\n'
                for i in range(t[3], t[4]):
                    yield 'A | ' + self.a[i]
                yield '----\n'
                for i in range(t[5], t[6]):
                    yield 'B | ' + self.b[i]
                yield '>>>>\n'
            else:
                raise ValueError(what)

    def merge_groups(self):
        """Yield sequence of line groups.  Each one is a tuple:

        'unchanged', lines
             Lines unchanged from base

        'a', lines
             Lines taken from a

        'same', lines
             Lines taken from a (and equal to b)

        'b', lines
             Lines taken from b

        'conflict', base_lines, a_lines, b_lines
             Lines from base were changed to either a or b and conflict.
        """
        for t in self.merge_regions():
            what = t[0]
            if what == 'unchanged':
                yield what, self.base[t[1]:t[2]]
            elif what == 'a' or what == 'same':
                yield what, self.a[t[1]:t[2]]
            elif what == 'b':
                yield what, self.b[t[1]:t[2]]
            elif what == 'conflict':
                yield (what,
                       self.base[t[1]:t[2]],
                       self.a[t[3]:t[4]],
                       self.b[t[5]:t[6]])
            else:
                raise ValueError(what)

    def merge_regions(self):
        """Return sequences of matching and conflicting regions.

        This returns tuples, where the first value says what kind we
        have:

        'unchanged', start, end
             Take a region of base[start:end]

        'same', astart, aend
             b and a are different from base but give the same result

        'a', start, end
             Non-clashing insertion from a[start:end]

        'b', start, end
             Non-clashing insertion from b[start:end]

        'conflict', zstart, zend, astart, aend, bstart, bend
             Divergent changes to the same base region

        Method is as follows:

        The two sequences align only on regions which match the base
        and both descendants.  These are found by doing a two-way diff
        of each one against the base, and then finding the
        intersections between those regions.  These "sync regions"
        are by definition unchanged in both and easily dealt with.

        The regions in between can be in any of three cases:
        conflicted, or changed on only one side.
        """

        # section a[0:ia] has been disposed of, etc
        iz = ia = ib = 0

        for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
            #print 'match base [%d:%d]' % (zmatch, zend)

            matchlen = zend - zmatch
            assert matchlen >= 0
            assert matchlen == (aend - amatch)
            assert matchlen == (bend - bmatch)

            # classify the unsynced gap before this sync region
            len_a = amatch - ia
            len_b = bmatch - ib
            len_base = zmatch - iz
            assert len_a >= 0
            assert len_b >= 0
            assert len_base >= 0

            #print 'unmatched a=%d, b=%d' % (len_a, len_b)

            if len_a or len_b:
                # try to avoid actually slicing the lists
                equal_a = compare_range(self.a, ia, amatch,
                                        self.base, iz, zmatch)
                equal_b = compare_range(self.b, ib, bmatch,
                                        self.base, iz, zmatch)
                same = compare_range(self.a, ia, amatch,
                                     self.b, ib, bmatch)

                if same:
                    yield 'same', ia, amatch
                elif equal_a and not equal_b:
                    yield 'b', ib, bmatch
                elif equal_b and not equal_a:
                    yield 'a', ia, amatch
                elif not equal_a and not equal_b:
                    yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch
                else:
                    raise AssertionError("can't handle a=b=base but unmatched")

                ia = amatch
                ib = bmatch
            iz = zmatch

            # if the same part of the base was deleted on both sides
            # that's OK, we can just skip it.


            if matchlen > 0:
                assert ia == amatch
                assert ib == bmatch
                assert iz == zmatch

                yield 'unchanged', zmatch, zend
                iz = zend
                ia = aend
                ib = bend

    def reprocess_merge_regions(self, merge_regions):
        """Where there are conflict regions, remove the agreed lines.

        Lines where both A and B have made the same changes are
        eliminated.
        """
        for region in merge_regions:
            if region[0] != "conflict":
                yield region
                continue
            type, iz, zmatch, ia, amatch, ib, bmatch = region
            a_region = self.a[ia:amatch]
            b_region = self.b[ib:bmatch]
            # diff the two conflicting sides against each other to pull
            # out any runs they agree on
            matches = mdiff.get_matching_blocks(''.join(a_region),
                                                ''.join(b_region))
            next_a = ia
            next_b = ib
            for region_ia, region_ib, region_len in matches[:-1]:
                region_ia += ia
                region_ib += ib
                reg = self.mismatch_region(next_a, region_ia, next_b,
                                           region_ib)
                if reg is not None:
                    yield reg
                yield 'same', region_ia, region_len + region_ia
                next_a = region_ia + region_len
                next_b = region_ib + region_len
            reg = self.mismatch_region(next_a, amatch, next_b, bmatch)
            if reg is not None:
                yield reg

    def mismatch_region(next_a, region_ia, next_b, region_ib):
        # Emit a (smaller) conflict tuple for any leftover disagreement,
        # or None when both sides are fully consumed.
        if next_a < region_ia or next_b < region_ib:
            return 'conflict', None, None, next_a, region_ia, next_b, region_ib
    # py2.3-style staticmethod wrapping (predates decorator syntax)
    mismatch_region = staticmethod(mismatch_region)

    def find_sync_regions(self):
        """Return a list of sync regions, where both descendants match the base.

        Generates a list of (base1, base2, a1, a2, b1, b2).  There is
        always a zero-length sync region at the end of all the files.
        """

        ia = ib = 0
        amatches = mdiff.get_matching_blocks(self.basetext, self.atext)
        bmatches = mdiff.get_matching_blocks(self.basetext, self.btext)
        len_a = len(amatches)
        len_b = len(bmatches)

        sl = []

        while ia < len_a and ib < len_b:
            abase, amatch, alen = amatches[ia]
            bbase, bmatch, blen = bmatches[ib]

            # there is an unconflicted block at i; how long does it
            # extend?  until whichever one ends earlier.
            i = intersect((abase, abase + alen), (bbase, bbase + blen))
            if i:
                intbase = i[0]
                intend = i[1]
                intlen = intend - intbase

                # found a match of base[i[0], i[1]]; this may be less than
                # the region that matches in either one
                assert intlen <= alen
                assert intlen <= blen
                assert abase <= intbase
                assert bbase <= intbase

                asub = amatch + (intbase - abase)
                bsub = bmatch + (intbase - bbase)
                aend = asub + intlen
                bend = bsub + intlen

                assert self.base[intbase:intend] == self.a[asub:aend], \
                       (self.base[intbase:intend], self.a[asub:aend])

                assert self.base[intbase:intend] == self.b[bsub:bend]

                sl.append((intbase, intend,
                           asub, aend,
                           bsub, bend))

            # advance whichever one ends first in the base text
            if (abase + alen) < (bbase + blen):
                ia += 1
            else:
                ib += 1

        # terminating zero-length sync region at the end of all files
        intbase = len(self.base)
        abase = len(self.a)
        bbase = len(self.b)
        sl.append((intbase, intbase, abase, abase, bbase, bbase))

        return sl

    def find_unconflicted(self):
        """Return a list of ranges in base that are not conflicted."""
        am = mdiff.get_matching_blocks(self.basetext, self.atext)
        bm = mdiff.get_matching_blocks(self.basetext, self.btext)

        unc = []

        while am and bm:
            # there is an unconflicted block at i; how long does it
            # extend?  until whichever one ends earlier.
            a1 = am[0][0]
            a2 = a1 + am[0][2]
            b1 = bm[0][0]
            b2 = b1 + bm[0][2]
            i = intersect((a1, a2), (b1, b2))
            if i:
                unc.append(i)

            if a2 < b2:
                del am[0]
            else:
                del bm[0]

        return unc
402 402
def simplemerge(ui, local, base, other, **opts):
    """Perform a 3-way merge of the files *local*, *base* and *other*.

    The merged result is written back over *local* (atomically) unless
    opts['print'] is set, in which case it goes to stdout.  Returns 1
    when the merge had conflicts or an input looked binary; otherwise
    falls through (None).
    """
    def readfile(filename):
        f = open(filename, "rb")
        text = f.read()
        f.close()
        # refuse binary input unless --text forces it; --quiet silences
        # the warning
        if util.binary(text):
            msg = _("%s looks like a binary file.") % filename
            if not opts.get('quiet'):
                ui.warn(_('warning: %s\n') % msg)
            if not opts.get('text'):
                raise util.Abort(msg)
        return text

    # conflict-marker labels default to the file names; up to two may be
    # overridden via --label
    name_a = local
    name_b = other
    labels = opts.get('label', [])
    if labels:
        name_a = labels.pop(0)
    if labels:
        name_b = labels.pop(0)
    if labels:
        raise util.Abort(_("can only specify two labels."))

    try:
        localtext = readfile(local)
        basetext = readfile(base)
        othertext = readfile(other)
    except util.Abort:
        return 1

    # resolve symlinks so the output is written next to the real file
    local = util.realpath(local)
    if not opts.get('print'):
        opener = scmutil.opener(os.path.dirname(local))
        out = opener(os.path.basename(local), "w", atomictemp=True)
    else:
        out = sys.stdout

    reprocess = not opts.get('no_minimal')

    m3 = Merge3Text(basetext, localtext, othertext)
    for line in m3.merge_lines(name_a=name_a, name_b=name_b,
                               reprocess=reprocess):
        out.write(line)

    if not opts.get('print'):
        out.close()

    if m3.conflicts:
        if not opts.get('quiet'):
            ui.warn(_("warning: conflicts during merge.\n"))
        return 1
@@ -1,40 +1,40
# Test that hgwebdir resolves the same repositories whether it is
# configured from a config file or from an equivalent paths dict.
import os
from mercurial import hg, ui, util
from mercurial.hgweb.hgwebdir_mod import hgwebdir

os.mkdir('webdir')
os.chdir('webdir')

webdir = util.realpath('.')

# build a small tree of repositories: a, b, b/d, c
u = ui.ui()
hg.repository(u, 'a', create=1)
hg.repository(u, 'b', create=1)
os.chdir('b')
hg.repository(u, 'd', create=1)
os.chdir('..')
hg.repository(u, 'c', create=1)
os.chdir('..')

# direct path, glob ('*') and recursive glob ('**') collections
paths = {'t/a/': '%s/a' % webdir,
         'b': '%s/b' % webdir,
         'coll': '%s/*' % webdir,
         'rcoll': '%s/**' % webdir}

# write the same mapping out as a config file
config = os.path.join(webdir, 'hgwebdir.conf')
configfile = open(config, 'w')
configfile.write('[paths]\n')
for k, v in paths.items():
    configfile.write('%s = %s\n' % (k, v))
configfile.close()

confwd = hgwebdir(config)
dictwd = hgwebdir(paths)

# both construction paths must discover the identical repo set
assert len(confwd.repos) == len(dictwd.repos), 'different numbers'
assert len(confwd.repos) == 9, 'expected 9 repos, found %d' % len(confwd.repos)

found = dict(confwd.repos)
for key, path in dictwd.repos:
    assert key in found, 'repository %s was not found' % key
    assert found[key] == path, 'different paths for repo %s' % key
General Comments 0
You need to be logged in to leave comments. Login now