commit: move commit editor to cmdutil, pass as function
Matt Mackall
r8407:223000a6 default
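This changeset drops the `force_editor` boolean from `localrepository.commit()` and instead has callers pass an editor callable taken from `cmdutil` (`commiteditor`, or `commitforceeditor` when -e/--edit or the deprecated --force-editor is given). A minimal sketch of the new calling convention, condensed from the fetch hunk below; `repo`, `files`, `message` and `opts` are placeholder names used for illustration, not part of this commit:

```python
from mercurial import cmdutil

def commit_with_editor(repo, files, message, opts):
    # Pick the editor function instead of computing a force_editor flag.
    editor = cmdutil.commiteditor
    if opts.get('force_editor') or opts.get('edit'):
        editor = cmdutil.commitforceeditor
    # repo.commit() now receives the editor as a function argument in place
    # of the removed force_editor (and empty_ok) keywords.
    return repo.commit(files, message, opts.get('user'), opts.get('date'),
                       force=True, editor=editor)
```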
@@ -1,147 +1,148 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 '''pulling, updating and merging in one command'''
9 9
10 10 from mercurial.i18n import _
11 11 from mercurial.node import nullid, short
12 12 from mercurial import commands, cmdutil, hg, util, url
13 13 from mercurial.lock import release
14 14
15 15 def fetch(ui, repo, source='default', **opts):
16 16 '''pull changes from a remote repository, merge new changes if needed.
17 17
18 18 This finds all changes from the repository at the specified path
19 19 or URL and adds them to the local repository.
20 20
21 21 If the pulled changes add a new branch head, the head is
22 22 automatically merged, and the result of the merge is committed.
23 23 Otherwise, the working directory is updated to include the new
24 24 changes.
25 25
26 26 When a merge occurs, the newly pulled changes are assumed to be
27 27 "authoritative". The head of the new changes is used as the first
28 28 parent, with local changes as the second. To switch the merge
29 29 order, use --switch-parent.
30 30
31 31 See 'hg help dates' for a list of formats valid for -d/--date.
32 32 '''
33 33
34 34 date = opts.get('date')
35 35 if date:
36 36 opts['date'] = util.parsedate(date)
37 37
38 38 parent, p2 = repo.dirstate.parents()
39 39 branch = repo.dirstate.branch()
40 40 branchnode = repo.branchtags().get(branch)
41 41 if parent != branchnode:
42 42 raise util.Abort(_('working dir not at branch tip '
43 43 '(use "hg update" to check out branch tip)'))
44 44
45 45 if p2 != nullid:
46 46 raise util.Abort(_('outstanding uncommitted merge'))
47 47
48 48 wlock = lock = None
49 49 try:
50 50 wlock = repo.wlock()
51 51 lock = repo.lock()
52 52 mod, add, rem, del_ = repo.status()[:4]
53 53
54 54 if mod or add or rem:
55 55 raise util.Abort(_('outstanding uncommitted changes'))
56 56 if del_:
57 57 raise util.Abort(_('working directory is missing some files'))
58 58 bheads = repo.branchheads(branch)
59 59 bheads = [head for head in bheads if len(repo[head].children()) == 0]
60 60 if len(bheads) > 1:
61 61 raise util.Abort(_('multiple heads in this branch '
62 62 '(use "hg heads ." and "hg merge" to merge)'))
63 63
64 64 other = hg.repository(cmdutil.remoteui(repo, opts),
65 65 ui.expandpath(source))
66 66 ui.status(_('pulling from %s\n') %
67 67 url.hidepassword(ui.expandpath(source)))
68 68 revs = None
69 69 if opts['rev']:
70 70 if not other.local():
71 71 raise util.Abort(_("fetch -r doesn't work for remote "
72 72 "repositories yet"))
73 73 else:
74 74 revs = [other.lookup(rev) for rev in opts['rev']]
75 75
76 76 # Are there any changes at all?
77 77 modheads = repo.pull(other, heads=revs)
78 78 if modheads == 0:
79 79 return 0
80 80
81 81 # Is this a simple fast-forward along the current branch?
82 82 newheads = repo.branchheads(branch)
83 83 newheads = [head for head in newheads if len(repo[head].children()) == 0]
84 84 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
85 85 if len(newheads) == 1:
86 86 if newchildren[0] != parent:
87 87 return hg.clean(repo, newchildren[0])
88 88 else:
89 89 return
90 90
91 91 # Are there more than one additional branch heads?
92 92 newchildren = [n for n in newchildren if n != parent]
93 93 newparent = parent
94 94 if newchildren:
95 95 newparent = newchildren[0]
96 96 hg.clean(repo, newparent)
97 97 newheads = [n for n in newheads if n != newparent]
98 98 if len(newheads) > 1:
99 99 ui.status(_('not merging with %d other new branch heads '
100 100 '(use "hg heads ." and "hg merge" to merge them)\n') %
101 101 (len(newheads) - 1))
102 102 return
103 103
104 104 # Otherwise, let's merge.
105 105 err = False
106 106 if newheads:
107 107 # By default, we consider the repository we're pulling
108 108 # *from* as authoritative, so we merge our changes into
109 109 # theirs.
110 110 if opts['switch_parent']:
111 111 firstparent, secondparent = newparent, newheads[0]
112 112 else:
113 113 firstparent, secondparent = newheads[0], newparent
114 114 ui.status(_('updating to %d:%s\n') %
115 115 (repo.changelog.rev(firstparent),
116 116 short(firstparent)))
117 117 hg.clean(repo, firstparent)
118 118 ui.status(_('merging with %d:%s\n') %
119 119 (repo.changelog.rev(secondparent), short(secondparent)))
120 120 err = hg.merge(repo, secondparent, remind=False)
121 121
122 122 if not err:
123 123 mod, add, rem = repo.status()[:3]
124 124 message = (cmdutil.logmessage(opts) or
125 125 (_('Automated merge with %s') %
126 126 url.removeauth(other.url())))
127 force_editor = opts.get('force_editor') or opts.get('edit')
128 n = repo.commit(mod + add + rem, message,
129 opts['user'], opts['date'], force=True,
130 force_editor=force_editor)
127 editor = cmdutil.commiteditor
128 if opts.get('force_editor') or opts.get('edit'):
129 editor = cmdutil.commitforceeditor
130 n = repo.commit(mod + add + rem, message, opts['user'],
131 opts['date'], force=True, editor=editor)
131 132 ui.status(_('new changeset %d:%s merges remote changes '
132 133 'with local\n') % (repo.changelog.rev(n),
133 134 short(n)))
134 135
135 136 finally:
136 137 release(lock, wlock)
137 138
138 139 cmdtable = {
139 140 'fetch':
140 141 (fetch,
141 142 [('r', 'rev', [], _('a specific revision you would like to pull')),
142 143 ('e', 'edit', None, _('edit commit message')),
143 144 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
144 145 ('', 'switch-parent', None, _('switch parents when merging')),
145 146 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
146 147 _('hg fetch [SOURCE]')),
147 148 }
@@ -1,536 +1,534 b''
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an
15 15 # audience not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Setup in hgrc:
25 25 #
26 26 # [extensions]
27 27 # # enable extension
28 28 # hgext.keyword =
29 29 #
30 30 # Files to act upon/ignore are specified in the [keyword] section.
31 31 # Customized keyword template mappings in the [keywordmaps] section.
32 32 #
33 33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34 34
35 35 '''keyword expansion in local repositories
36 36
37 37 This extension expands RCS/CVS-like or self-customized $Keywords$ in
38 38 tracked text files selected by your configuration.
39 39
40 40 Keywords are only expanded in local repositories and not stored in the
41 41 change history. The mechanism can be regarded as a convenience for the
42 42 current user or for archive distribution.
43 43
44 44 Configuration is done in the [keyword] and [keywordmaps] sections of
45 45 hgrc files.
46 46
47 47 Example:
48 48
49 49 [keyword]
50 50 # expand keywords in every python file except those matching "x*"
51 51 **.py =
52 52 x* = ignore
53 53
54 54 Note: the more specific you are in your filename patterns
55 55 the less you lose speed in huge repositories.
56 56
57 57 For [keywordmaps] template mapping and expansion demonstration and
58 58 control run "hg kwdemo".
59 59
60 60 An additional date template filter {date|utcdate} is provided.
61 61
62 62 The default template mappings (view with "hg kwdemo -d") can be
63 63 replaced with customized keywords and templates. Again, run "hg
64 64 kwdemo" to control the results of your config changes.
65 65
66 66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 67 the risk of inadvertently storing expanded keywords in the change
68 68 history.
69 69
70 70 To force expansion after enabling it, or a configuration change, run
71 71 "hg kwexpand".
72 72
73 73 Also, when committing with the record extension or using mq's qrecord,
74 74 be aware that keywords cannot be updated. Again, run "hg kwexpand" on
75 75 the files in question to update keyword expansions after all changes
76 76 have been checked in.
77 77
78 78 Expansions spanning more than one line and incremental expansions,
79 79 like CVS' $Log$, are not supported. A keyword template map
80 80 "Log = {desc}" expands to the first line of the changeset description.
81 81 '''
82 82
83 83 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
84 84 from mercurial import patch, localrepo, templater, templatefilters, util
85 85 from mercurial.hgweb import webcommands
86 86 from mercurial.lock import release
87 87 from mercurial.node import nullid, hex
88 88 from mercurial.i18n import _
89 89 import re, shutil, tempfile, time
90 90
91 91 commands.optionalrepo += ' kwdemo'
92 92
93 93 # hg commands that do not act on keywords
94 94 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
95 95 ' log outgoing push rename rollback tip verify'
96 96 ' convert email glog')
97 97
98 98 # hg commands that trigger expansion only when writing to working dir,
99 99 # not when reading filelog, and unexpand when reading from working dir
100 100 restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
101 101
102 102 def utcdate(date):
103 103 '''Returns hgdate in cvs-like UTC format.'''
104 104 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
105 105
106 106 # make keyword tools accessible
107 107 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
108 108
109 109
110 110 class kwtemplater(object):
111 111 '''
112 112 Sets up keyword templates, corresponding keyword regex, and
113 113 provides keyword substitution functions.
114 114 '''
115 115 templates = {
116 116 'Revision': '{node|short}',
117 117 'Author': '{author|user}',
118 118 'Date': '{date|utcdate}',
119 119 'RCSFile': '{file|basename},v',
120 120 'Source': '{root}/{file},v',
121 121 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
122 122 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
123 123 }
124 124
125 125 def __init__(self, ui, repo):
126 126 self.ui = ui
127 127 self.repo = repo
128 128 self.matcher = util.matcher(repo.root,
129 129 inc=kwtools['inc'], exc=kwtools['exc'])[1]
130 130 self.restrict = kwtools['hgcmd'] in restricted.split()
131 131
132 132 kwmaps = self.ui.configitems('keywordmaps')
133 133 if kwmaps: # override default templates
134 134 kwmaps = [(k, templater.parsestring(v, False))
135 135 for (k, v) in kwmaps]
136 136 self.templates = dict(kwmaps)
137 137 escaped = map(re.escape, self.templates.keys())
138 138 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
139 139 self.re_kw = re.compile(kwpat)
140 140
141 141 templatefilters.filters['utcdate'] = utcdate
142 142 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
143 143 False, None, '', False)
144 144
145 145 def substitute(self, data, path, ctx, subfunc):
146 146 '''Replaces keywords in data with expanded template.'''
147 147 def kwsub(mobj):
148 148 kw = mobj.group(1)
149 149 self.ct.use_template(self.templates[kw])
150 150 self.ui.pushbuffer()
151 151 self.ct.show(ctx, root=self.repo.root, file=path)
152 152 ekw = templatefilters.firstline(self.ui.popbuffer())
153 153 return '$%s: %s $' % (kw, ekw)
154 154 return subfunc(kwsub, data)
155 155
156 156 def expand(self, path, node, data):
157 157 '''Returns data with keywords expanded.'''
158 158 if not self.restrict and self.matcher(path) and not util.binary(data):
159 159 ctx = self.repo.filectx(path, fileid=node).changectx()
160 160 return self.substitute(data, path, ctx, self.re_kw.sub)
161 161 return data
162 162
163 163 def iskwfile(self, path, flagfunc):
164 164 '''Returns true if path matches [keyword] pattern
165 165 and is not a symbolic link.
166 166 Caveat: localrepository._link fails on Windows.'''
167 167 return self.matcher(path) and not 'l' in flagfunc(path)
168 168
169 169 def overwrite(self, node, expand, files):
170 170 '''Overwrites selected files expanding/shrinking keywords.'''
171 171 ctx = self.repo[node]
172 172 mf = ctx.manifest()
173 173 if node is not None: # commit
174 174 files = [f for f in ctx.files() if f in mf]
175 175 notify = self.ui.debug
176 176 else: # kwexpand/kwshrink
177 177 notify = self.ui.note
178 178 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
179 179 if candidates:
180 180 self.restrict = True # do not expand when reading
181 181 msg = (expand and _('overwriting %s expanding keywords\n')
182 182 or _('overwriting %s shrinking keywords\n'))
183 183 for f in candidates:
184 184 fp = self.repo.file(f)
185 185 data = fp.read(mf[f])
186 186 if util.binary(data):
187 187 continue
188 188 if expand:
189 189 if node is None:
190 190 ctx = self.repo.filectx(f, fileid=mf[f]).changectx()
191 191 data, found = self.substitute(data, f, ctx,
192 192 self.re_kw.subn)
193 193 else:
194 194 found = self.re_kw.search(data)
195 195 if found:
196 196 notify(msg % f)
197 197 self.repo.wwrite(f, data, mf.flags(f))
198 198 self.repo.dirstate.normal(f)
199 199 self.restrict = False
200 200
201 201 def shrinktext(self, text):
202 202 '''Unconditionally removes all keyword substitutions from text.'''
203 203 return self.re_kw.sub(r'$\1$', text)
204 204
205 205 def shrink(self, fname, text):
206 206 '''Returns text with all keyword substitutions removed.'''
207 207 if self.matcher(fname) and not util.binary(text):
208 208 return self.shrinktext(text)
209 209 return text
210 210
211 211 def shrinklines(self, fname, lines):
212 212 '''Returns lines with keyword substitutions removed.'''
213 213 if self.matcher(fname):
214 214 text = ''.join(lines)
215 215 if not util.binary(text):
216 216 return self.shrinktext(text).splitlines(True)
217 217 return lines
218 218
219 219 def wread(self, fname, data):
220 220 '''If in restricted mode returns data read from wdir with
221 221 keyword substitutions removed.'''
222 222 return self.restrict and self.shrink(fname, data) or data
223 223
224 224 class kwfilelog(filelog.filelog):
225 225 '''
226 226 Subclass of filelog to hook into its read, add, cmp methods.
227 227 Keywords are "stored" unexpanded, and processed on reading.
228 228 '''
229 229 def __init__(self, opener, kwt, path):
230 230 super(kwfilelog, self).__init__(opener, path)
231 231 self.kwt = kwt
232 232 self.path = path
233 233
234 234 def read(self, node):
235 235 '''Expands keywords when reading filelog.'''
236 236 data = super(kwfilelog, self).read(node)
237 237 return self.kwt.expand(self.path, node, data)
238 238
239 239 def add(self, text, meta, tr, link, p1=None, p2=None):
240 240 '''Removes keyword substitutions when adding to filelog.'''
241 241 text = self.kwt.shrink(self.path, text)
242 242 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
243 243
244 244 def cmp(self, node, text):
245 245 '''Removes keyword substitutions for comparison.'''
246 246 text = self.kwt.shrink(self.path, text)
247 247 if self.renamed(node):
248 248 t2 = super(kwfilelog, self).read(node)
249 249 return t2 != text
250 250 return revlog.revlog.cmp(self, node, text)
251 251
252 252 def _status(ui, repo, kwt, unknown, *pats, **opts):
253 253 '''Bails out if [keyword] configuration is not active.
254 254 Returns status of working directory.'''
255 255 if kwt:
256 256 matcher = cmdutil.match(repo, pats, opts)
257 257 return repo.status(match=matcher, unknown=unknown, clean=True)
258 258 if ui.configitems('keyword'):
259 259 raise util.Abort(_('[keyword] patterns cannot match'))
260 260 raise util.Abort(_('no [keyword] patterns configured'))
261 261
262 262 def _kwfwrite(ui, repo, expand, *pats, **opts):
263 263 '''Selects files and passes them to kwtemplater.overwrite.'''
264 264 if repo.dirstate.parents()[1] != nullid:
265 265 raise util.Abort(_('outstanding uncommitted merge'))
266 266 kwt = kwtools['templater']
267 267 status = _status(ui, repo, kwt, False, *pats, **opts)
268 268 modified, added, removed, deleted = status[:4]
269 269 if modified or added or removed or deleted:
270 270 raise util.Abort(_('outstanding uncommitted changes'))
271 271 wlock = lock = None
272 272 try:
273 273 wlock = repo.wlock()
274 274 lock = repo.lock()
275 275 kwt.overwrite(None, expand, status[6])
276 276 finally:
277 277 release(lock, wlock)
278 278
279 279 def demo(ui, repo, *args, **opts):
280 280 '''print [keywordmaps] configuration and an expansion example
281 281
282 282 Show current, custom, or default keyword template maps and their
283 283 expansion.
284 284
285 285 Extend current configuration by specifying maps as arguments and
286 286 optionally by reading from an additional hgrc file.
287 287
288 288 Override current keyword template maps with "default" option.
289 289 '''
290 290 def demostatus(stat):
291 291 ui.status(_('\n\t%s\n') % stat)
292 292
293 293 def demoitems(section, items):
294 294 ui.write('[%s]\n' % section)
295 295 for k, v in items:
296 296 ui.write('%s = %s\n' % (k, v))
297 297
298 298 msg = 'hg keyword config and expansion example'
299 299 kwstatus = 'current'
300 300 fn = 'demo.txt'
301 301 branchname = 'demobranch'
302 302 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
303 303 ui.note(_('creating temporary repository at %s\n') % tmpdir)
304 304 repo = localrepo.localrepository(ui, tmpdir, True)
305 305 ui.setconfig('keyword', fn, '')
306 306 if args or opts.get('rcfile'):
307 307 kwstatus = 'custom'
308 308 if opts.get('rcfile'):
309 309 ui.readconfig(opts.get('rcfile'))
310 310 if opts.get('default'):
311 311 kwstatus = 'default'
312 312 kwmaps = kwtemplater.templates
313 313 if ui.configitems('keywordmaps'):
314 314 # override maps from optional rcfile
315 315 for k, v in kwmaps.iteritems():
316 316 ui.setconfig('keywordmaps', k, v)
317 317 elif args:
318 318 # simulate hgrc parsing
319 319 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
320 320 fp = repo.opener('hgrc', 'w')
321 321 fp.writelines(rcmaps)
322 322 fp.close()
323 323 ui.readconfig(repo.join('hgrc'))
324 324 if not opts.get('default'):
325 325 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
326 326 uisetup(ui)
327 327 reposetup(ui, repo)
328 328 for k, v in ui.configitems('extensions'):
329 329 if k.endswith('keyword'):
330 330 extension = '%s = %s' % (k, v)
331 331 break
332 332 demostatus('config using %s keyword template maps' % kwstatus)
333 333 ui.write('[extensions]\n%s\n' % extension)
334 334 demoitems('keyword', ui.configitems('keyword'))
335 335 demoitems('keywordmaps', kwmaps.iteritems())
336 336 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
337 337 repo.wopener(fn, 'w').write(keywords)
338 338 repo.add([fn])
339 339 path = repo.wjoin(fn)
340 340 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
341 341 ui.note(keywords)
342 342 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
343 343 # silence branch command if not verbose
344 344 quiet = ui.quiet
345 345 ui.quiet = not ui.verbose
346 346 commands.branch(ui, repo, branchname)
347 347 ui.quiet = quiet
348 348 for name, cmd in ui.configitems('hooks'):
349 349 if name.split('.', 1)[0].find('commit') > -1:
350 350 repo.ui.setconfig('hooks', name, '')
351 351 ui.note(_('unhooked all commit hooks\n'))
352 352 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
353 353 repo.commit(text=msg)
354 354 fmt = ui.verbose and ' in %s' % path or ''
355 355 demostatus('%s keywords expanded%s' % (kwstatus, fmt))
356 356 ui.write(repo.wread(fn))
357 357 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
358 358 shutil.rmtree(tmpdir, ignore_errors=True)
359 359
360 360 def expand(ui, repo, *pats, **opts):
361 361 '''expand keywords in working directory
362 362
363 363 Run after (re)enabling keyword expansion.
364 364
365 365 kwexpand refuses to run if given files contain local changes.
366 366 '''
367 367 # 3rd argument sets expansion to True
368 368 _kwfwrite(ui, repo, True, *pats, **opts)
369 369
370 370 def files(ui, repo, *pats, **opts):
371 371 '''print files currently configured for keyword expansion
372 372
373 373 Crosscheck which files in working directory are potential targets
374 374 for keyword expansion. That is, files matched by [keyword] config
375 375 patterns but not symlinks.
376 376 '''
377 377 kwt = kwtools['templater']
378 378 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
379 379 modified, added, removed, deleted, unknown, ignored, clean = status
380 380 files = sorted(modified + added + clean + unknown)
381 381 wctx = repo[None]
382 382 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
383 383 cwd = pats and repo.getcwd() or ''
384 384 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
385 385 if opts.get('all') or opts.get('ignore'):
386 386 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
387 387 for char, filenames in kwfstats:
388 388 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
389 389 for f in filenames:
390 390 ui.write(fmt % repo.pathto(f, cwd))
391 391
392 392 def shrink(ui, repo, *pats, **opts):
393 393 '''revert expanded keywords in working directory
394 394
395 395 Run before changing/disabling active keywords or if you experience
396 396 problems with "hg import" or "hg merge".
397 397
398 398 kwshrink refuses to run if given files contain local changes.
399 399 '''
400 400 # 3rd argument sets expansion to False
401 401 _kwfwrite(ui, repo, False, *pats, **opts)
402 402
403 403
404 404 def uisetup(ui):
405 405 '''Collects [keyword] config in kwtools.
406 406 Monkeypatches dispatch._parse if needed.'''
407 407
408 408 for pat, opt in ui.configitems('keyword'):
409 409 if opt != 'ignore':
410 410 kwtools['inc'].append(pat)
411 411 else:
412 412 kwtools['exc'].append(pat)
413 413
414 414 if kwtools['inc']:
415 415 def kwdispatch_parse(orig, ui, args):
416 416 '''Monkeypatch dispatch._parse to obtain running hg command.'''
417 417 cmd, func, args, options, cmdoptions = orig(ui, args)
418 418 kwtools['hgcmd'] = cmd
419 419 return cmd, func, args, options, cmdoptions
420 420
421 421 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
422 422
423 423 def reposetup(ui, repo):
424 424 '''Sets up repo as kwrepo for keyword substitution.
425 425 Overrides file method to return kwfilelog instead of filelog
426 426 if file matches user configuration.
427 427 Wraps commit to overwrite configured files with updated
428 428 keyword substitutions.
429 429 Monkeypatches patch and webcommands.'''
430 430
431 431 try:
432 432 if (not repo.local() or not kwtools['inc']
433 433 or kwtools['hgcmd'] in nokwcommands.split()
434 434 or '.hg' in util.splitpath(repo.root)
435 435 or repo._url.startswith('bundle:')):
436 436 return
437 437 except AttributeError:
438 438 pass
439 439
440 440 kwtools['templater'] = kwt = kwtemplater(ui, repo)
441 441
442 442 class kwrepo(repo.__class__):
443 443 def file(self, f):
444 444 if f[0] == '/':
445 445 f = f[1:]
446 446 return kwfilelog(self.sopener, kwt, f)
447 447
448 448 def wread(self, filename):
449 449 data = super(kwrepo, self).wread(filename)
450 450 return kwt.wread(filename, data)
451 451
452 452 def commit(self, files=None, text='', user=None, date=None,
453 match=None, force=False, force_editor=False,
454 extra={}, empty_ok=False):
453 match=None, force=False, editor=None, extra={}):
455 454 wlock = lock = None
456 455 _p1 = _p2 = None
457 456 try:
458 457 wlock = self.wlock()
459 458 lock = self.lock()
460 459 # store and postpone commit hooks
461 460 commithooks = {}
462 461 for name, cmd in ui.configitems('hooks'):
463 462 if name.split('.', 1)[0] == 'commit':
464 463 commithooks[name] = cmd
465 464 ui.setconfig('hooks', name, None)
466 465 if commithooks:
467 466 # store parents for commit hook environment
468 467 _p1, _p2 = repo.dirstate.parents()
469 468 _p1 = hex(_p1)
470 469 if _p2 == nullid:
471 470 _p2 = ''
472 471 else:
473 472 _p2 = hex(_p2)
474 473
475 474 n = super(kwrepo, self).commit(files, text, user, date, match,
476 force, force_editor,
477 extra, empty_ok)
475 force, editor, extra)
478 476
479 477 # restore commit hooks
480 478 for name, cmd in commithooks.iteritems():
481 479 ui.setconfig('hooks', name, cmd)
482 480 if n is not None:
483 481 kwt.overwrite(n, True, None)
484 482 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
485 483 return n
486 484 finally:
487 485 release(lock, wlock)
488 486
489 487 # monkeypatches
490 488 def kwpatchfile_init(orig, self, ui, fname, opener, missing=False):
491 489 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
492 490 rejects or conflicts due to expanded keywords in working dir.'''
493 491 orig(self, ui, fname, opener, missing)
494 492 # shrink keywords read from working dir
495 493 self.lines = kwt.shrinklines(self.fname, self.lines)
496 494
497 495 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
498 496 opts=None):
499 497 '''Monkeypatch patch.diff to avoid expansion except when
500 498 comparing against working dir.'''
501 499 if node2 is not None:
502 500 kwt.matcher = util.never
503 501 elif node1 is not None and node1 != repo['.'].node():
504 502 kwt.restrict = True
505 503 return orig(repo, node1, node2, match, changes, opts)
506 504
507 505 def kwweb_skip(orig, web, req, tmpl):
508 506 '''Wraps webcommands.x turning off keyword expansion.'''
509 507 kwt.matcher = util.never
510 508 return orig(web, req, tmpl)
511 509
512 510 repo.__class__ = kwrepo
513 511
514 512 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
515 513 extensions.wrapfunction(patch, 'diff', kw_diff)
516 514 for c in 'annotate changeset rev filediff diff'.split():
517 515 extensions.wrapfunction(webcommands, c, kwweb_skip)
518 516
519 517 cmdtable = {
520 518 'kwdemo':
521 519 (demo,
522 520 [('d', 'default', None, _('show default keyword template maps')),
523 521 ('f', 'rcfile', [], _('read maps from rcfile'))],
524 522 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
525 523 'kwexpand': (expand, commands.walkopts,
526 524 _('hg kwexpand [OPTION]... [FILE]...')),
527 525 'kwfiles':
528 526 (files,
529 527 [('a', 'all', None, _('show keyword status flags of all files')),
530 528 ('i', 'ignore', None, _('show files excluded from expansion')),
531 529 ('u', 'untracked', None, _('additionally show untracked files')),
532 530 ] + commands.walkopts,
533 531 _('hg kwfiles [OPTION]... [FILE]...')),
534 532 'kwshrink': (shrink, commands.walkopts,
535 533 _('hg kwshrink [OPTION]... [FILE]...')),
536 534 }
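Extensions that subclass the repository and override commit() have to follow the same signature change. A condensed, illustrative version of the kwrepo wrapper from the hunk above, with the hook bookkeeping elided; `reposetup_sketch` is just a placeholder name:

```python
def reposetup_sketch(ui, repo):
    class kwrepo(repo.__class__):
        def commit(self, files=None, text='', user=None, date=None,
                   match=None, force=False, editor=None, extra={}):
            # force_editor and empty_ok are gone; the editor callable is
            # simply forwarded to the parent class.
            return super(kwrepo, self).commit(files, text, user, date,
                                              match, force, editor, extra)
    repo.__class__ = kwrepo
```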
@@ -1,1223 +1,1260 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat, errno
11 11 import mdiff, bdiff, util, templater, patch, error, encoding
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
16 16 def findpossible(cmd, table, strict=False):
17 17 """
18 18 Return cmd -> (aliases, command table entry)
19 19 for each matching command.
20 20 Return debug commands (or their aliases) only if no normal command matches.
21 21 """
22 22 choice = {}
23 23 debugchoice = {}
24 24 for e in table.keys():
25 25 aliases = e.lstrip("^").split("|")
26 26 found = None
27 27 if cmd in aliases:
28 28 found = cmd
29 29 elif not strict:
30 30 for a in aliases:
31 31 if a.startswith(cmd):
32 32 found = a
33 33 break
34 34 if found is not None:
35 35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 36 debugchoice[found] = (aliases, table[e])
37 37 else:
38 38 choice[found] = (aliases, table[e])
39 39
40 40 if not choice and debugchoice:
41 41 choice = debugchoice
42 42
43 43 return choice
44 44
45 45 def findcmd(cmd, table, strict=True):
46 46 """Return (aliases, command table entry) for command string."""
47 47 choice = findpossible(cmd, table, strict)
48 48
49 49 if cmd in choice:
50 50 return choice[cmd]
51 51
52 52 if len(choice) > 1:
53 53 clist = choice.keys()
54 54 clist.sort()
55 55 raise error.AmbiguousCommand(cmd, clist)
56 56
57 57 if choice:
58 58 return choice.values()[0]
59 59
60 60 raise error.UnknownCommand(cmd)
61 61
62 62 def bail_if_changed(repo):
63 63 if repo.dirstate.parents()[1] != nullid:
64 64 raise util.Abort(_('outstanding uncommitted merge'))
65 65 modified, added, removed, deleted = repo.status()[:4]
66 66 if modified or added or removed or deleted:
67 67 raise util.Abort(_("outstanding uncommitted changes"))
68 68
69 69 def logmessage(opts):
70 70 """ get the log message according to -m and -l option """
71 71 message = opts.get('message')
72 72 logfile = opts.get('logfile')
73 73
74 74 if message and logfile:
75 75 raise util.Abort(_('options --message and --logfile are mutually '
76 76 'exclusive'))
77 77 if not message and logfile:
78 78 try:
79 79 if logfile == '-':
80 80 message = sys.stdin.read()
81 81 else:
82 82 message = open(logfile).read()
83 83 except IOError, inst:
84 84 raise util.Abort(_("can't read commit message '%s': %s") %
85 85 (logfile, inst.strerror))
86 86 return message
87 87
88 88 def loglimit(opts):
89 89 """get the log limit according to option -l/--limit"""
90 90 limit = opts.get('limit')
91 91 if limit:
92 92 try:
93 93 limit = int(limit)
94 94 except ValueError:
95 95 raise util.Abort(_('limit must be a positive integer'))
96 96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 97 else:
98 98 limit = sys.maxint
99 99 return limit
100 100
101 101 def remoteui(src, opts):
102 102 'build a remote ui from ui or repo and opts'
103 103 if hasattr(src, 'baseui'): # looks like a repository
104 104 dst = src.baseui # drop repo-specific config
105 105 src = src.ui # copy target options from repo
106 106 else: # assume it's a global ui object
107 107 dst = src # keep all global options
108 108
109 109 # copy ssh-specific options
110 110 for o in 'ssh', 'remotecmd':
111 111 v = opts.get(o) or src.config('ui', o)
112 112 if v:
113 113 dst.setconfig("ui", o, v)
114 114 # copy bundle-specific options
115 115 r = src.config('bundle', 'mainreporoot')
116 116 if r:
117 117 dst.setconfig('bundle', 'mainreporoot', r)
118 118
119 119 return dst
120 120
121 121 def revpair(repo, revs):
122 122 '''return pair of nodes, given list of revisions. second item can
123 123 be None, meaning use working dir.'''
124 124
125 125 def revfix(repo, val, defval):
126 126 if not val and val != 0 and defval is not None:
127 127 val = defval
128 128 return repo.lookup(val)
129 129
130 130 if not revs:
131 131 return repo.dirstate.parents()[0], None
132 132 end = None
133 133 if len(revs) == 1:
134 134 if revrangesep in revs[0]:
135 135 start, end = revs[0].split(revrangesep, 1)
136 136 start = revfix(repo, start, 0)
137 137 end = revfix(repo, end, len(repo) - 1)
138 138 else:
139 139 start = revfix(repo, revs[0], None)
140 140 elif len(revs) == 2:
141 141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 142 raise util.Abort(_('too many revisions specified'))
143 143 start = revfix(repo, revs[0], None)
144 144 end = revfix(repo, revs[1], None)
145 145 else:
146 146 raise util.Abort(_('too many revisions specified'))
147 147 return start, end
148 148
149 149 def revrange(repo, revs):
150 150 """Yield revision as strings from a list of revision specifications."""
151 151
152 152 def revfix(repo, val, defval):
153 153 if not val and val != 0 and defval is not None:
154 154 return defval
155 155 return repo.changelog.rev(repo.lookup(val))
156 156
157 157 seen, l = set(), []
158 158 for spec in revs:
159 159 if revrangesep in spec:
160 160 start, end = spec.split(revrangesep, 1)
161 161 start = revfix(repo, start, 0)
162 162 end = revfix(repo, end, len(repo) - 1)
163 163 step = start > end and -1 or 1
164 164 for rev in xrange(start, end+step, step):
165 165 if rev in seen:
166 166 continue
167 167 seen.add(rev)
168 168 l.append(rev)
169 169 else:
170 170 rev = revfix(repo, spec, None)
171 171 if rev in seen:
172 172 continue
173 173 seen.add(rev)
174 174 l.append(rev)
175 175
176 176 return l
177 177
178 178 def make_filename(repo, pat, node,
179 179 total=None, seqno=None, revwidth=None, pathname=None):
180 180 node_expander = {
181 181 'H': lambda: hex(node),
182 182 'R': lambda: str(repo.changelog.rev(node)),
183 183 'h': lambda: short(node),
184 184 }
185 185 expander = {
186 186 '%': lambda: '%',
187 187 'b': lambda: os.path.basename(repo.root),
188 188 }
189 189
190 190 try:
191 191 if node:
192 192 expander.update(node_expander)
193 193 if node:
194 194 expander['r'] = (lambda:
195 195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 196 if total is not None:
197 197 expander['N'] = lambda: str(total)
198 198 if seqno is not None:
199 199 expander['n'] = lambda: str(seqno)
200 200 if total is not None and seqno is not None:
201 201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 202 if pathname is not None:
203 203 expander['s'] = lambda: os.path.basename(pathname)
204 204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 205 expander['p'] = lambda: pathname
206 206
207 207 newname = []
208 208 patlen = len(pat)
209 209 i = 0
210 210 while i < patlen:
211 211 c = pat[i]
212 212 if c == '%':
213 213 i += 1
214 214 c = pat[i]
215 215 c = expander[c]()
216 216 newname.append(c)
217 217 i += 1
218 218 return ''.join(newname)
219 219 except KeyError, inst:
220 220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 221 inst.args[0])
222 222
223 223 def make_file(repo, pat, node=None,
224 224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225 225
226 226 writable = 'w' in mode or 'a' in mode
227 227
228 228 if not pat or pat == '-':
229 229 return writable and sys.stdout or sys.stdin
230 230 if hasattr(pat, 'write') and writable:
231 231 return pat
232 232 if hasattr(pat, 'read') and 'r' in mode:
233 233 return pat
234 234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 235 pathname),
236 236 mode)
237 237
238 238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 239 if not globbed and default == 'relpath':
240 240 pats = util.expand_glob(pats or [])
241 241 m = _match.match(repo.root, repo.getcwd(), pats,
242 242 opts.get('include'), opts.get('exclude'), default)
243 243 def badfn(f, msg):
244 244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 245 return False
246 246 m.bad = badfn
247 247 return m
248 248
249 249 def matchall(repo):
250 250 return _match.always(repo.root, repo.getcwd())
251 251
252 252 def matchfiles(repo, files):
253 253 return _match.exact(repo.root, repo.getcwd(), files)
254 254
255 255 def findrenames(repo, added=None, removed=None, threshold=0.5):
256 256 '''find renamed files -- yields (before, after, score) tuples'''
257 257 if added is None or removed is None:
258 258 added, removed = repo.status()[1:3]
259 259 ctx = repo['.']
260 260 for a in added:
261 261 aa = repo.wread(a)
262 262 bestname, bestscore = None, threshold
263 263 for r in removed:
264 264 rr = ctx.filectx(r).data()
265 265
266 266 # bdiff.blocks() returns blocks of matching lines
267 267 # count the number of bytes in each
268 268 equal = 0
269 269 alines = mdiff.splitnewlines(aa)
270 270 matches = bdiff.blocks(aa, rr)
271 271 for x1,x2,y1,y2 in matches:
272 272 for line in alines[x1:x2]:
273 273 equal += len(line)
274 274
275 275 lengths = len(aa) + len(rr)
276 276 if lengths:
277 277 myscore = equal*2.0 / lengths
278 278 if myscore >= bestscore:
279 279 bestname, bestscore = r, myscore
280 280 if bestname:
281 281 yield bestname, a, bestscore
282 282
283 283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
284 284 if dry_run is None:
285 285 dry_run = opts.get('dry_run')
286 286 if similarity is None:
287 287 similarity = float(opts.get('similarity') or 0)
288 288 add, remove = [], []
289 289 mapping = {}
290 290 audit_path = util.path_auditor(repo.root)
291 291 m = match(repo, pats, opts)
292 292 for abs in repo.walk(m):
293 293 target = repo.wjoin(abs)
294 294 good = True
295 295 try:
296 296 audit_path(abs)
297 297 except:
298 298 good = False
299 299 rel = m.rel(abs)
300 300 exact = m.exact(abs)
301 301 if good and abs not in repo.dirstate:
302 302 add.append(abs)
303 303 mapping[abs] = rel, m.exact(abs)
304 304 if repo.ui.verbose or not exact:
305 305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
306 306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
307 307 or (os.path.isdir(target) and not os.path.islink(target))):
308 308 remove.append(abs)
309 309 mapping[abs] = rel, exact
310 310 if repo.ui.verbose or not exact:
311 311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
312 312 if not dry_run:
313 313 repo.remove(remove)
314 314 repo.add(add)
315 315 if similarity > 0:
316 316 for old, new, score in findrenames(repo, add, remove, similarity):
317 317 oldrel, oldexact = mapping[old]
318 318 newrel, newexact = mapping[new]
319 319 if repo.ui.verbose or not oldexact or not newexact:
320 320 repo.ui.status(_('recording removal of %s as rename to %s '
321 321 '(%d%% similar)\n') %
322 322 (oldrel, newrel, score * 100))
323 323 if not dry_run:
324 324 repo.copy(old, new)
325 325
326 326 def copy(ui, repo, pats, opts, rename=False):
327 327 # called with the repo lock held
328 328 #
329 329 # hgsep => pathname that uses "/" to separate directories
330 330 # ossep => pathname that uses os.sep to separate directories
331 331 cwd = repo.getcwd()
332 332 targets = {}
333 333 after = opts.get("after")
334 334 dryrun = opts.get("dry_run")
335 335
336 336 def walkpat(pat):
337 337 srcs = []
338 338 m = match(repo, [pat], opts, globbed=True)
339 339 for abs in repo.walk(m):
340 340 state = repo.dirstate[abs]
341 341 rel = m.rel(abs)
342 342 exact = m.exact(abs)
343 343 if state in '?r':
344 344 if exact and state == '?':
345 345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
346 346 if exact and state == 'r':
347 347 ui.warn(_('%s: not copying - file has been marked for'
348 348 ' remove\n') % rel)
349 349 continue
350 350 # abs: hgsep
351 351 # rel: ossep
352 352 srcs.append((abs, rel, exact))
353 353 return srcs
354 354
355 355 # abssrc: hgsep
356 356 # relsrc: ossep
357 357 # otarget: ossep
358 358 def copyfile(abssrc, relsrc, otarget, exact):
359 359 abstarget = util.canonpath(repo.root, cwd, otarget)
360 360 reltarget = repo.pathto(abstarget, cwd)
361 361 target = repo.wjoin(abstarget)
362 362 src = repo.wjoin(abssrc)
363 363 state = repo.dirstate[abstarget]
364 364
365 365 # check for collisions
366 366 prevsrc = targets.get(abstarget)
367 367 if prevsrc is not None:
368 368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
369 369 (reltarget, repo.pathto(abssrc, cwd),
370 370 repo.pathto(prevsrc, cwd)))
371 371 return
372 372
373 373 # check for overwrites
374 374 exists = os.path.exists(target)
375 375 if not after and exists or after and state in 'mn':
376 376 if not opts['force']:
377 377 ui.warn(_('%s: not overwriting - file exists\n') %
378 378 reltarget)
379 379 return
380 380
381 381 if after:
382 382 if not exists:
383 383 return
384 384 elif not dryrun:
385 385 try:
386 386 if exists:
387 387 os.unlink(target)
388 388 targetdir = os.path.dirname(target) or '.'
389 389 if not os.path.isdir(targetdir):
390 390 os.makedirs(targetdir)
391 391 util.copyfile(src, target)
392 392 except IOError, inst:
393 393 if inst.errno == errno.ENOENT:
394 394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
395 395 else:
396 396 ui.warn(_('%s: cannot copy - %s\n') %
397 397 (relsrc, inst.strerror))
398 398 return True # report a failure
399 399
400 400 if ui.verbose or not exact:
401 401 if rename:
402 402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
403 403 else:
404 404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
405 405
406 406 targets[abstarget] = abssrc
407 407
408 408 # fix up dirstate
409 409 origsrc = repo.dirstate.copied(abssrc) or abssrc
410 410 if abstarget == origsrc: # copying back a copy?
411 411 if state not in 'mn' and not dryrun:
412 412 repo.dirstate.normallookup(abstarget)
413 413 else:
414 414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
415 415 if not ui.quiet:
416 416 ui.warn(_("%s has not been committed yet, so no copy "
417 417 "data will be stored for %s.\n")
418 418 % (repo.pathto(origsrc, cwd), reltarget))
419 419 if repo.dirstate[abstarget] in '?r' and not dryrun:
420 420 repo.add([abstarget])
421 421 elif not dryrun:
422 422 repo.copy(origsrc, abstarget)
423 423
424 424 if rename and not dryrun:
425 425 repo.remove([abssrc], not after)
426 426
427 427 # pat: ossep
428 428 # dest ossep
429 429 # srcs: list of (hgsep, hgsep, ossep, bool)
430 430 # return: function that takes hgsep and returns ossep
431 431 def targetpathfn(pat, dest, srcs):
432 432 if os.path.isdir(pat):
433 433 abspfx = util.canonpath(repo.root, cwd, pat)
434 434 abspfx = util.localpath(abspfx)
435 435 if destdirexists:
436 436 striplen = len(os.path.split(abspfx)[0])
437 437 else:
438 438 striplen = len(abspfx)
439 439 if striplen:
440 440 striplen += len(os.sep)
441 441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
442 442 elif destdirexists:
443 443 res = lambda p: os.path.join(dest,
444 444 os.path.basename(util.localpath(p)))
445 445 else:
446 446 res = lambda p: dest
447 447 return res
448 448
449 449 # pat: ossep
450 450 # dest ossep
451 451 # srcs: list of (hgsep, hgsep, ossep, bool)
452 452 # return: function that takes hgsep and returns ossep
453 453 def targetpathafterfn(pat, dest, srcs):
454 454 if util.patkind(pat, None)[0]:
455 455 # a mercurial pattern
456 456 res = lambda p: os.path.join(dest,
457 457 os.path.basename(util.localpath(p)))
458 458 else:
459 459 abspfx = util.canonpath(repo.root, cwd, pat)
460 460 if len(abspfx) < len(srcs[0][0]):
461 461 # A directory. Either the target path contains the last
462 462 # component of the source path or it does not.
463 463 def evalpath(striplen):
464 464 score = 0
465 465 for s in srcs:
466 466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
467 467 if os.path.exists(t):
468 468 score += 1
469 469 return score
470 470
471 471 abspfx = util.localpath(abspfx)
472 472 striplen = len(abspfx)
473 473 if striplen:
474 474 striplen += len(os.sep)
475 475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
476 476 score = evalpath(striplen)
477 477 striplen1 = len(os.path.split(abspfx)[0])
478 478 if striplen1:
479 479 striplen1 += len(os.sep)
480 480 if evalpath(striplen1) > score:
481 481 striplen = striplen1
482 482 res = lambda p: os.path.join(dest,
483 483 util.localpath(p)[striplen:])
484 484 else:
485 485 # a file
486 486 if destdirexists:
487 487 res = lambda p: os.path.join(dest,
488 488 os.path.basename(util.localpath(p)))
489 489 else:
490 490 res = lambda p: dest
491 491 return res
492 492
493 493
494 494 pats = util.expand_glob(pats)
495 495 if not pats:
496 496 raise util.Abort(_('no source or destination specified'))
497 497 if len(pats) == 1:
498 498 raise util.Abort(_('no destination specified'))
499 499 dest = pats.pop()
500 500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
501 501 if not destdirexists:
502 502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
503 503 raise util.Abort(_('with multiple sources, destination must be an '
504 504 'existing directory'))
505 505 if util.endswithsep(dest):
506 506 raise util.Abort(_('destination %s is not a directory') % dest)
507 507
508 508 tfn = targetpathfn
509 509 if after:
510 510 tfn = targetpathafterfn
511 511 copylist = []
512 512 for pat in pats:
513 513 srcs = walkpat(pat)
514 514 if not srcs:
515 515 continue
516 516 copylist.append((tfn(pat, dest, srcs), srcs))
517 517 if not copylist:
518 518 raise util.Abort(_('no files to copy'))
519 519
520 520 errors = 0
521 521 for targetpath, srcs in copylist:
522 522 for abssrc, relsrc, exact in srcs:
523 523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
524 524 errors += 1
525 525
526 526 if errors:
527 527 ui.warn(_('(consider using --after)\n'))
528 528
529 529 return errors
530 530
531 531 def service(opts, parentfn=None, initfn=None, runfn=None):
532 532 '''Run a command as a service.'''
533 533
534 534 if opts['daemon'] and not opts['daemon_pipefds']:
535 535 rfd, wfd = os.pipe()
536 536 args = sys.argv[:]
537 537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
538 538 # Don't pass --cwd to the child process, because we've already
539 539 # changed directory.
540 540 for i in xrange(1,len(args)):
541 541 if args[i].startswith('--cwd='):
542 542 del args[i]
543 543 break
544 544 elif args[i].startswith('--cwd'):
545 545 del args[i:i+2]
546 546 break
547 547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
548 548 args[0], args)
549 549 os.close(wfd)
550 550 os.read(rfd, 1)
551 551 if parentfn:
552 552 return parentfn(pid)
553 553 else:
554 554 os._exit(0)
555 555
556 556 if initfn:
557 557 initfn()
558 558
559 559 if opts['pid_file']:
560 560 fp = open(opts['pid_file'], 'w')
561 561 fp.write(str(os.getpid()) + '\n')
562 562 fp.close()
563 563
564 564 if opts['daemon_pipefds']:
565 565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
566 566 os.close(rfd)
567 567 try:
568 568 os.setsid()
569 569 except AttributeError:
570 570 pass
571 571 os.write(wfd, 'y')
572 572 os.close(wfd)
573 573 sys.stdout.flush()
574 574 sys.stderr.flush()
575 575 fd = os.open(util.nulldev, os.O_RDWR)
576 576 if fd != 0: os.dup2(fd, 0)
577 577 if fd != 1: os.dup2(fd, 1)
578 578 if fd != 2: os.dup2(fd, 2)
579 579 if fd not in (0, 1, 2): os.close(fd)
580 580
581 581 if runfn:
582 582 return runfn()
583 583
584 584 class changeset_printer(object):
585 585 '''show changeset information when templating not requested.'''
586 586
587 587 def __init__(self, ui, repo, patch, diffopts, buffered):
588 588 self.ui = ui
589 589 self.repo = repo
590 590 self.buffered = buffered
591 591 self.patch = patch
592 592 self.diffopts = diffopts
593 593 self.header = {}
594 594 self.hunk = {}
595 595 self.lastheader = None
596 596
597 597 def flush(self, rev):
598 598 if rev in self.header:
599 599 h = self.header[rev]
600 600 if h != self.lastheader:
601 601 self.lastheader = h
602 602 self.ui.write(h)
603 603 del self.header[rev]
604 604 if rev in self.hunk:
605 605 self.ui.write(self.hunk[rev])
606 606 del self.hunk[rev]
607 607 return 1
608 608 return 0
609 609
610 610 def show(self, ctx, copies=(), **props):
611 611 if self.buffered:
612 612 self.ui.pushbuffer()
613 613 self._show(ctx, copies, props)
614 614 self.hunk[ctx.rev()] = self.ui.popbuffer()
615 615 else:
616 616 self._show(ctx, copies, props)
617 617
618 618 def _show(self, ctx, copies, props):
619 619 '''show a single changeset or file revision'''
620 620 changenode = ctx.node()
621 621 rev = ctx.rev()
622 622
623 623 if self.ui.quiet:
624 624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
625 625 return
626 626
627 627 log = self.repo.changelog
628 628 changes = log.read(changenode)
629 629 date = util.datestr(changes[2])
630 630 extra = changes[5]
631 631 branch = extra.get("branch")
632 632
633 633 hexfunc = self.ui.debugflag and hex or short
634 634
635 635 parents = [(p, hexfunc(log.node(p)))
636 636 for p in self._meaningful_parentrevs(log, rev)]
637 637
638 638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
639 639
640 640 # don't show the default branch name
641 641 if branch != 'default':
642 642 branch = encoding.tolocal(branch)
643 643 self.ui.write(_("branch: %s\n") % branch)
644 644 for tag in self.repo.nodetags(changenode):
645 645 self.ui.write(_("tag: %s\n") % tag)
646 646 for parent in parents:
647 647 self.ui.write(_("parent: %d:%s\n") % parent)
648 648
649 649 if self.ui.debugflag:
650 650 self.ui.write(_("manifest: %d:%s\n") %
651 651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
652 652 self.ui.write(_("user: %s\n") % changes[1])
653 653 self.ui.write(_("date: %s\n") % date)
654 654
655 655 if self.ui.debugflag:
656 656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
657 657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
658 658 files):
659 659 if value:
660 660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
661 661 elif changes[3] and self.ui.verbose:
662 662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
663 663 if copies and self.ui.verbose:
664 664 copies = ['%s (%s)' % c for c in copies]
665 665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
666 666
667 667 if extra and self.ui.debugflag:
668 668 for key, value in sorted(extra.items()):
669 669 self.ui.write(_("extra: %s=%s\n")
670 670 % (key, value.encode('string_escape')))
671 671
672 672 description = changes[4].strip()
673 673 if description:
674 674 if self.ui.verbose:
675 675 self.ui.write(_("description:\n"))
676 676 self.ui.write(description)
677 677 self.ui.write("\n\n")
678 678 else:
679 679 self.ui.write(_("summary: %s\n") %
680 680 description.splitlines()[0])
681 681 self.ui.write("\n")
682 682
683 683 self.showpatch(changenode)
684 684
685 685 def showpatch(self, node):
686 686 if self.patch:
687 687 prev = self.repo.changelog.parents(node)[0]
688 688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
689 689 opts=patch.diffopts(self.ui, self.diffopts))
690 690 for chunk in chunks:
691 691 self.ui.write(chunk)
692 692 self.ui.write("\n")
693 693
694 694 def _meaningful_parentrevs(self, log, rev):
695 695 """Return list of meaningful (or all if debug) parentrevs for rev.
696 696
697 697 For merges (two non-nullrev revisions) both parents are meaningful.
698 698 Otherwise the first parent revision is considered meaningful if it
699 699 is not the preceding revision.
700 700 """
701 701 parents = log.parentrevs(rev)
702 702 if not self.ui.debugflag and parents[1] == nullrev:
703 703 if parents[0] >= rev - 1:
704 704 parents = []
705 705 else:
706 706 parents = [parents[0]]
707 707 return parents
708 708
709 709
710 710 class changeset_templater(changeset_printer):
711 711 '''format changeset information.'''
712 712
713 713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
714 714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
715 715 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
716 716 self.t = templater.templater(mapfile, {'formatnode': formatnode},
717 717 cache={
718 718 'parent': '{rev}:{node|formatnode} ',
719 719 'manifest': '{rev}:{node|formatnode}',
720 720 'filecopy': '{name} ({source})'})
721 721
722 722 def use_template(self, t):
723 723 '''set template string to use'''
724 724 self.t.cache['changeset'] = t
725 725
726 726 def _meaningful_parentrevs(self, ctx):
727 727 """Return list of meaningful (or all if debug) parentrevs for rev.
728 728 """
729 729 parents = ctx.parents()
730 730 if len(parents) > 1:
731 731 return parents
732 732 if self.ui.debugflag:
733 733 return [parents[0], self.repo['null']]
734 734 if parents[0].rev() >= ctx.rev() - 1:
735 735 return []
736 736 return parents
737 737
738 738 def _show(self, ctx, copies, props):
739 739 '''show a single changeset or file revision'''
740 740
741 741 def showlist(name, values, plural=None, **args):
742 742 '''expand set of values.
743 743 name is name of key in template map.
744 744 values is list of strings or dicts.
745 745 plural is plural of name, if not simply name + 's'.
746 746
747 747 expansion works like this, given name 'foo'.
748 748
749 749 if values is empty, expand 'no_foos'.
750 750
751 751 if 'foo' not in template map, return values as a string,
752 752 joined by space.
753 753
754 754 expand 'start_foos'.
755 755
756 756 for each value, expand 'foo'. if 'last_foo' in template
757 757 map, expand it instead of 'foo' for last key.
758 758
759 759 expand 'end_foos'.
760 760 '''
761 761 if plural: names = plural
762 762 else: names = name + 's'
763 763 if not values:
764 764 noname = 'no_' + names
765 765 if noname in self.t:
766 766 yield self.t(noname, **args)
767 767 return
768 768 if name not in self.t:
769 769 if isinstance(values[0], str):
770 770 yield ' '.join(values)
771 771 else:
772 772 for v in values:
773 773 yield dict(v, **args)
774 774 return
775 775 startname = 'start_' + names
776 776 if startname in self.t:
777 777 yield self.t(startname, **args)
778 778 vargs = args.copy()
779 779 def one(v, tag=name):
780 780 try:
781 781 vargs.update(v)
782 782 except (AttributeError, ValueError):
783 783 try:
784 784 for a, b in v:
785 785 vargs[a] = b
786 786 except ValueError:
787 787 vargs[name] = v
788 788 return self.t(tag, **vargs)
789 789 lastname = 'last_' + name
790 790 if lastname in self.t:
791 791 last = values.pop()
792 792 else:
793 793 last = None
794 794 for v in values:
795 795 yield one(v)
796 796 if last is not None:
797 797 yield one(last, tag=lastname)
798 798 endname = 'end_' + names
799 799 if endname in self.t:
800 800 yield self.t(endname, **args)
801 801
802 802 def showbranches(**args):
803 803 branch = ctx.branch()
804 804 if branch != 'default':
805 805 branch = encoding.tolocal(branch)
806 806 return showlist('branch', [branch], plural='branches', **args)
807 807
808 808 def showparents(**args):
809 809 parents = [[('rev', p.rev()), ('node', p.hex())]
810 810 for p in self._meaningful_parentrevs(ctx)]
811 811 return showlist('parent', parents, **args)
812 812
813 813 def showtags(**args):
814 814 return showlist('tag', ctx.tags(), **args)
815 815
816 816 def showextras(**args):
817 817 for key, value in sorted(ctx.extra().items()):
818 818 args = args.copy()
819 819 args.update(dict(key=key, value=value))
820 820 yield self.t('extra', **args)
821 821
822 822 def showcopies(**args):
823 823 c = [{'name': x[0], 'source': x[1]} for x in copies]
824 824 return showlist('file_copy', c, plural='file_copies', **args)
825 825
826 826 files = []
827 827 def getfiles():
828 828 if not files:
829 829 files[:] = self.repo.status(ctx.parents()[0].node(),
830 830 ctx.node())[:3]
831 831 return files
832 832 def showfiles(**args):
833 833 return showlist('file', ctx.files(), **args)
834 834 def showmods(**args):
835 835 return showlist('file_mod', getfiles()[0], **args)
836 836 def showadds(**args):
837 837 return showlist('file_add', getfiles()[1], **args)
838 838 def showdels(**args):
839 839 return showlist('file_del', getfiles()[2], **args)
840 840 def showmanifest(**args):
841 841 args = args.copy()
842 842 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
843 843 node=hex(ctx.changeset()[0])))
844 844 return self.t('manifest', **args)
845 845
846 846 def showdiffstat(**args):
847 847 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
848 848 files, adds, removes = 0, 0, 0
849 849 for i in patch.diffstatdata(util.iterlines(diff)):
850 850 files += 1
851 851 adds += i[1]
852 852 removes += i[2]
853 853 return '%s: +%s/-%s' % (files, adds, removes)
854 854
855 855 defprops = {
856 856 'author': ctx.user(),
857 857 'branches': showbranches,
858 858 'date': ctx.date(),
859 859 'desc': ctx.description().strip(),
860 860 'file_adds': showadds,
861 861 'file_dels': showdels,
862 862 'file_mods': showmods,
863 863 'files': showfiles,
864 864 'file_copies': showcopies,
865 865 'manifest': showmanifest,
866 866 'node': ctx.hex(),
867 867 'parents': showparents,
868 868 'rev': ctx.rev(),
869 869 'tags': showtags,
870 870 'extras': showextras,
871 871 'diffstat': showdiffstat,
872 872 }
873 873 props = props.copy()
874 874 props.update(defprops)
875 875
876 876 # find correct templates for current mode
877 877
878 878 tmplmodes = [
879 879 (True, None),
880 880 (self.ui.verbose, 'verbose'),
881 881 (self.ui.quiet, 'quiet'),
882 882 (self.ui.debugflag, 'debug'),
883 883 ]
884 884
885 885 types = {'header': '', 'changeset': 'changeset'}
886 886 for mode, postfix in tmplmodes:
887 887 for type in types:
888 888 cur = postfix and ('%s_%s' % (type, postfix)) or type
889 889 if mode and cur in self.t:
890 890 types[type] = cur
891 891
892 892 try:
893 893
894 894 # write header
895 895 if types['header']:
896 896 h = templater.stringify(self.t(types['header'], **props))
897 897 if self.buffered:
898 898 self.header[ctx.rev()] = h
899 899 else:
900 900 self.ui.write(h)
901 901
902 902 # write changeset metadata, then patch if requested
903 903 key = types['changeset']
904 904 self.ui.write(templater.stringify(self.t(key, **props)))
905 905 self.showpatch(ctx.node())
906 906
907 907 except KeyError, inst:
908 908 msg = _("%s: no key named '%s'")
909 909 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
910 910 except SyntaxError, inst:
911 911 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
912 912
913 913 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
914 914 """show one changeset using template or regular display.
915 915
916 916 Display format will be the first non-empty hit of:
917 917 1. option 'template'
918 918 2. option 'style'
919 919 3. [ui] setting 'logtemplate'
920 920 4. [ui] setting 'style'
921 921 If all of these values are either unset or the empty string,
922 922 regular display via changeset_printer() is done.
923 923 """
924 924 # options
925 925 patch = False
926 926 if opts.get('patch'):
927 927 patch = matchfn or matchall(repo)
928 928
929 929 tmpl = opts.get('template')
930 930 style = None
931 931 if tmpl:
932 932 tmpl = templater.parsestring(tmpl, quoted=False)
933 933 else:
934 934 style = opts.get('style')
935 935
936 936 # ui settings
937 937 if not (tmpl or style):
938 938 tmpl = ui.config('ui', 'logtemplate')
939 939 if tmpl:
940 940 tmpl = templater.parsestring(tmpl)
941 941 else:
942 942 style = ui.config('ui', 'style')
943 943
944 944 if not (tmpl or style):
945 945 return changeset_printer(ui, repo, patch, opts, buffered)
946 946
947 947 mapfile = None
948 948 if style and not tmpl:
949 949 mapfile = style
950 950 if not os.path.split(mapfile)[0]:
951 951 mapname = (templater.templatepath('map-cmdline.' + mapfile)
952 952 or templater.templatepath(mapfile))
953 953 if mapname: mapfile = mapname
954 954
955 955 try:
956 956 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
957 957 except SyntaxError, inst:
958 958 raise util.Abort(inst.args[0])
959 959 if tmpl: t.use_template(tmpl)
960 960 return t
961 961
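# Usage sketch (assumed hgrc contents, not taken from this changeset): with
#   [ui]
#   logtemplate = {rev} {desc|firstline}\n
# step 3 of the resolution order above applies whenever a command calls
# show_changeset without an explicit --template or --style.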
962 962 def finddate(ui, repo, date):
963 963 """Find the tipmost changeset that matches the given date spec"""
964 964 df = util.matchdate(date)
965 965 get = util.cachefunc(lambda r: repo[r].changeset())
966 966 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
967 967 results = {}
968 968 for st, rev, fns in changeiter:
969 969 if st == 'add':
970 970 d = get(rev)[2]
971 971 if df(d[0]):
972 972 results[rev] = d
973 973 elif st == 'iter':
974 974 if rev in results:
975 975 ui.status(_("Found revision %s from %s\n") %
976 976 (rev, util.datestr(results[rev])))
977 977 return str(rev)
978 978
979 979 raise util.Abort(_("revision matching date not found"))
980 980
981 981 def walkchangerevs(ui, repo, pats, change, opts):
982 982 '''Iterate over files and the revs in which they changed.
983 983
984 984 Callers most commonly need to iterate backwards over the history
985 985 in which they are interested. Doing so has awful (quadratic-looking)
986 986 performance, so we use iterators in a "windowed" way.
987 987
988 988 We walk a window of revisions in the desired order. Within the
989 989 window, we first walk forwards to gather data, then in the desired
990 990 order (usually backwards) to display it.
991 991
992 992 This function returns an (iterator, matchfn) tuple. The iterator
993 993 yields 3-tuples. They will be of one of the following forms:
994 994
995 995 "window", incrementing, lastrev: stepping through a window,
996 996 positive if walking forwards through revs, last rev in the
997 997 sequence iterated over - use to reset state for the current window
998 998
999 999 "add", rev, fns: out-of-order traversal of the given file names
1000 1000 fns, which changed during revision rev - use to gather data for
1001 1001 possible display
1002 1002
1003 1003 "iter", rev, None: in-order traversal of the revs earlier iterated
1004 1004 over with "add" - use to display data'''
1005 1005
1006 1006 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1007 1007 if start < end:
1008 1008 while start < end:
1009 1009 yield start, min(windowsize, end-start)
1010 1010 start += windowsize
1011 1011 if windowsize < sizelimit:
1012 1012 windowsize *= 2
1013 1013 else:
1014 1014 while start > end:
1015 1015 yield start, min(windowsize, start-end-1)
1016 1016 start -= windowsize
1017 1017 if windowsize < sizelimit:
1018 1018 windowsize *= 2
1019 1019
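    # Worked example (illustrative only): increasing_windows(0, 20) yields
    # (0, 8) then (8, 12), doubling the window size up to sizelimit; walking
    # backwards, increasing_windows(20, nullrev) yields (20, 8) then (12, 12).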
1020 1020 m = match(repo, pats, opts)
1021 1021 follow = opts.get('follow') or opts.get('follow_first')
1022 1022
1023 1023 if not len(repo):
1024 1024 return [], m
1025 1025
1026 1026 if follow:
1027 1027 defrange = '%s:0' % repo['.'].rev()
1028 1028 else:
1029 1029 defrange = '-1:0'
1030 1030 revs = revrange(repo, opts['rev'] or [defrange])
1031 1031 wanted = set()
1032 1032 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1033 1033 fncache = {}
1034 1034
1035 1035 if not slowpath and not m.files():
1036 1036 # No files, no patterns. Display all revs.
1037 1037 wanted = set(revs)
1038 1038 copies = []
1039 1039 if not slowpath:
1040 1040 # Only files, no patterns. Check the history of each file.
1041 1041 def filerevgen(filelog, node):
1042 1042 cl_count = len(repo)
1043 1043 if node is None:
1044 1044 last = len(filelog) - 1
1045 1045 else:
1046 1046 last = filelog.rev(node)
1047 1047 for i, window in increasing_windows(last, nullrev):
1048 1048 revs = []
1049 1049 for j in xrange(i - window, i + 1):
1050 1050 n = filelog.node(j)
1051 1051 revs.append((filelog.linkrev(j),
1052 1052 follow and filelog.renamed(n)))
1053 1053 for rev in reversed(revs):
1054 1054 # only yield rev for which we have the changelog, it can
1055 1055 # happen while doing "hg log" during a pull or commit
1056 1056 if rev[0] < cl_count:
1057 1057 yield rev
1058 1058 def iterfiles():
1059 1059 for filename in m.files():
1060 1060 yield filename, None
1061 1061 for filename_node in copies:
1062 1062 yield filename_node
1063 1063 minrev, maxrev = min(revs), max(revs)
1064 1064 for file_, node in iterfiles():
1065 1065 filelog = repo.file(file_)
1066 1066 if not len(filelog):
1067 1067 if node is None:
1068 1068 # A zero count may be a directory or deleted file, so
1069 1069 # try to find matching entries on the slow path.
1070 1070 if follow:
1071 1071 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1072 1072 slowpath = True
1073 1073 break
1074 1074 else:
1075 1075 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1076 1076 % (file_, short(node)))
1077 1077 continue
1078 1078 for rev, copied in filerevgen(filelog, node):
1079 1079 if rev <= maxrev:
1080 1080 if rev < minrev:
1081 1081 break
1082 1082 fncache.setdefault(rev, [])
1083 1083 fncache[rev].append(file_)
1084 1084 wanted.add(rev)
1085 1085 if follow and copied:
1086 1086 copies.append(copied)
1087 1087 if slowpath:
1088 1088 if follow:
1089 1089 raise util.Abort(_('can only follow copies/renames for explicit '
1090 1090 'file names'))
1091 1091
1092 1092 # The slow path checks files modified in every changeset.
1093 1093 def changerevgen():
1094 1094 for i, window in increasing_windows(len(repo) - 1, nullrev):
1095 1095 for j in xrange(i - window, i + 1):
1096 1096 yield j, change(j)[3]
1097 1097
1098 1098 for rev, changefiles in changerevgen():
1099 1099 matches = filter(m, changefiles)
1100 1100 if matches:
1101 1101 fncache[rev] = matches
1102 1102 wanted.add(rev)
1103 1103
1104 1104 class followfilter:
1105 1105 def __init__(self, onlyfirst=False):
1106 1106 self.startrev = nullrev
1107 1107 self.roots = []
1108 1108 self.onlyfirst = onlyfirst
1109 1109
1110 1110 def match(self, rev):
1111 1111 def realparents(rev):
1112 1112 if self.onlyfirst:
1113 1113 return repo.changelog.parentrevs(rev)[0:1]
1114 1114 else:
1115 1115 return filter(lambda x: x != nullrev,
1116 1116 repo.changelog.parentrevs(rev))
1117 1117
1118 1118 if self.startrev == nullrev:
1119 1119 self.startrev = rev
1120 1120 return True
1121 1121
1122 1122 if rev > self.startrev:
1123 1123 # forward: all descendants
1124 1124 if not self.roots:
1125 1125 self.roots.append(self.startrev)
1126 1126 for parent in realparents(rev):
1127 1127 if parent in self.roots:
1128 1128 self.roots.append(rev)
1129 1129 return True
1130 1130 else:
1131 1131 # backwards: all parents
1132 1132 if not self.roots:
1133 1133 self.roots.extend(realparents(self.startrev))
1134 1134 if rev in self.roots:
1135 1135 self.roots.remove(rev)
1136 1136 self.roots.extend(realparents(rev))
1137 1137 return True
1138 1138
1139 1139 return False
1140 1140
1141 1141 # it might be worthwhile to do this in the iterator if the rev range
1142 1142 # is descending and the prune args are all within that range
1143 1143 for rev in opts.get('prune', ()):
1144 1144 rev = repo.changelog.rev(repo.lookup(rev))
1145 1145 ff = followfilter()
1146 1146 stop = min(revs[0], revs[-1])
1147 1147 for x in xrange(rev, stop-1, -1):
1148 1148 if ff.match(x):
1149 1149 wanted.discard(x)
1150 1150
1151 1151 def iterate():
1152 1152 if follow and not m.files():
1153 1153 ff = followfilter(onlyfirst=opts.get('follow_first'))
1154 1154 def want(rev):
1155 1155 return ff.match(rev) and rev in wanted
1156 1156 else:
1157 1157 def want(rev):
1158 1158 return rev in wanted
1159 1159
1160 1160 for i, window in increasing_windows(0, len(revs)):
1161 1161 yield 'window', revs[0] < revs[-1], revs[-1]
1162 1162 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1163 1163 for rev in sorted(nrevs):
1164 1164 fns = fncache.get(rev)
1165 1165 if not fns:
1166 1166 def fns_generator():
1167 1167 for f in change(rev)[3]:
1168 1168 if m(f):
1169 1169 yield f
1170 1170 fns = fns_generator()
1171 1171 yield 'add', rev, fns
1172 1172 for rev in nrevs:
1173 1173 yield 'iter', rev, None
1174 1174 return iterate(), m
1175 1175
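# Minimal consumer sketch (hypothetical caller, following the protocol in the
# docstring above; compare finddate earlier in this file):
#   changeiter, matchfn = walkchangerevs(ui, repo, pats, change, opts)
#   for st, rev, fns in changeiter:
#       if st == 'add':        # gather data for rev (visited out of order)
#           ...
#       elif st == 'iter':     # emit output for rev (in the requested order)
#           ...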
1176 1176 def commit(ui, repo, commitfunc, pats, opts):
1177 1177 '''commit the specified files or all outstanding changes'''
1178 1178 date = opts.get('date')
1179 1179 if date:
1180 1180 opts['date'] = util.parsedate(date)
1181 1181 message = logmessage(opts)
1182 1182
1183 1183 # extract addremove carefully -- this function can be called from a command
1184 1184 # that doesn't support addremove
1185 1185 if opts.get('addremove'):
1186 1186 addremove(repo, pats, opts)
1187 1187
1188 1188 m = match(repo, pats, opts)
1189 1189 if pats:
1190 1190 modified, added, removed = repo.status(match=m)[:3]
1191 1191 files = sorted(modified + added + removed)
1192 1192
1193 1193 def is_dir(f):
1194 1194 name = f + '/'
1195 1195 i = bisect.bisect(files, name)
1196 1196 return i < len(files) and files[i].startswith(name)
1197 1197
1198 1198 for f in m.files():
1199 1199 if f == '.':
1200 1200 continue
1201 1201 if f not in files:
1202 1202 rf = repo.wjoin(f)
1203 1203 rel = repo.pathto(f)
1204 1204 try:
1205 1205 mode = os.lstat(rf)[stat.ST_MODE]
1206 1206 except OSError:
1207 1207 if is_dir(f): # deleted directory ?
1208 1208 continue
1209 1209 raise util.Abort(_("file %s not found!") % rel)
1210 1210 if stat.S_ISDIR(mode):
1211 1211 if not is_dir(f):
1212 1212 raise util.Abort(_("no match under directory %s!")
1213 1213 % rel)
1214 1214 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1215 1215 raise util.Abort(_("can't commit %s: "
1216 1216 "unsupported file type!") % rel)
1217 1217 elif f not in repo.dirstate:
1218 1218 raise util.Abort(_("file %s not tracked!") % rel)
1219 1219 m = matchfiles(repo, files)
1220 1220 try:
1221 1221 return commitfunc(ui, repo, message, m, opts)
1222 1222 except ValueError, inst:
1223 1223 raise util.Abort(str(inst))
1224
1225 def commiteditor(repo, ctx, added, updated, removed):
1226 if ctx.description():
1227 return ctx.description()
1228 return commitforceeditor(repo, ctx, added, updated, removed)
1229
1230 def commitforceeditor(repo, ctx, added, updated, removed):
1231 edittext = []
1232 if ctx.description():
1233 edittext.append(ctx.description())
1234 edittext.append("")
1235 edittext.append("") # Empty line between message and comments.
1236 edittext.append(_("HG: Enter commit message."
1237 " Lines beginning with 'HG:' are removed."))
1238 edittext.append("HG: --")
1239 edittext.append(_("HG: user: %s") % ctx.user())
1240 if ctx.p2():
1241 edittext.append(_("HG: branch merge"))
1242 if ctx.branch():
1243 edittext.append(_("HG: branch '%s'")
1244 % encoding.tolocal(ctx.branch()))
1245 edittext.extend([_("HG: added %s") % f for f in added])
1246 edittext.extend([_("HG: changed %s") % f for f in updated])
1247 edittext.extend([_("HG: removed %s") % f for f in removed])
1248 if not added and not updated and not removed:
1249 edittext.append(_("HG: no files changed"))
1250 edittext.append("")
1251 # run editor in the repository root
1252 olddir = os.getcwd()
1253 os.chdir(repo.root)
1254 text = repo.ui.edit("\n".join(edittext), ctx.user())
1255 os.chdir(olddir)
1256
1257 if not text.strip():
1258 raise util.Abort(_("empty commit message"))
1259
1260 return text
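# Sketch of the new editor contract (an assumption drawn from commiteditor
# above and the editor= argument wired up in commands.commit below): any
# callable taking (repo, ctx, added, updated, removed) and returning the
# commit message text can be passed, e.g. a hypothetical quiet editor:
#   def quieteditor(repo, ctx, added, updated, removed):
#       return ctx.description() or "update %d files" % (
#           len(added) + len(updated) + len(removed))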
@@ -1,3447 +1,3451 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, textwrap, subprocess, difflib, time
12 12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 13 import patch, help, mdiff, tempfile, url, encoding
14 14 import archival, changegroup, cmdutil, sshserver, hbisect
15 15 from hgweb import server
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
20 20 def add(ui, repo, *pats, **opts):
21 21 """add the specified files on the next commit
22 22
23 23 Schedule files to be version controlled and added to the
24 24 repository.
25 25
26 26 The files will be added to the repository at the next commit. To
27 27 undo an add before that, see hg revert.
28 28
29 29 If no names are given, add all files to the repository.
30 30 """
31 31
32 32 rejected = None
33 33 exacts = {}
34 34 names = []
35 35 m = cmdutil.match(repo, pats, opts)
36 36 m.bad = lambda x,y: True
37 37 for abs in repo.walk(m):
38 38 if m.exact(abs):
39 39 if ui.verbose:
40 40 ui.status(_('adding %s\n') % m.rel(abs))
41 41 names.append(abs)
42 42 exacts[abs] = 1
43 43 elif abs not in repo.dirstate:
44 44 ui.status(_('adding %s\n') % m.rel(abs))
45 45 names.append(abs)
46 46 if not opts.get('dry_run'):
47 47 rejected = repo.add(names)
48 48 rejected = [p for p in rejected if p in exacts]
49 49 return rejected and 1 or 0
50 50
51 51 def addremove(ui, repo, *pats, **opts):
52 52 """add all new files, delete all missing files
53 53
54 54 Add all new files and remove all missing files from the
55 55 repository.
56 56
57 57 New files are ignored if they match any of the patterns in
58 58 .hgignore. As with add, these changes take effect at the next
59 59 commit.
60 60
61 61 Use the -s/--similarity option to detect renamed files. With a
62 62 parameter > 0, this compares every removed file with every added
63 63 file and records those similar enough as renames. This option
64 64 takes a percentage between 0 (disabled) and 100 (files must be
65 65 identical) as its parameter. Detecting renamed files this way can
66 66 be expensive.
67 67 """
68 68 try:
69 69 sim = float(opts.get('similarity') or 0)
70 70 except ValueError:
71 71 raise util.Abort(_('similarity must be a number'))
72 72 if sim < 0 or sim > 100:
73 73 raise util.Abort(_('similarity must be between 0 and 100'))
74 74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
75 75
76 76 def annotate(ui, repo, *pats, **opts):
77 77 """show changeset information per file line
78 78
79 79 List changes in files, showing the revision id responsible for
80 80 each line.
81 81
82 82 This command is useful to discover who made a change or when a
83 83 change took place.
84 84
85 85 Without the -a/--text option, annotate will avoid processing files
86 86 it detects as binary. With -a, annotate will generate an
87 87 annotation anyway, probably with undesirable results.
88 88 """
89 89 datefunc = ui.quiet and util.shortdate or util.datestr
90 90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
91 91
92 92 if not pats:
93 93 raise util.Abort(_('at least one file name or pattern required'))
94 94
95 95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
96 96 ('number', lambda x: str(x[0].rev())),
97 97 ('changeset', lambda x: short(x[0].node())),
98 98 ('date', getdate),
99 99 ('follow', lambda x: x[0].path()),
100 100 ]
101 101
102 102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
103 103 and not opts.get('follow')):
104 104 opts['number'] = 1
105 105
106 106 linenumber = opts.get('line_number') is not None
107 107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
108 108 raise util.Abort(_('at least one of -n/-c is required for -l'))
109 109
110 110 funcmap = [func for op, func in opmap if opts.get(op)]
111 111 if linenumber:
112 112 lastfunc = funcmap[-1]
113 113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
114 114
115 115 ctx = repo[opts.get('rev')]
116 116
117 117 m = cmdutil.match(repo, pats, opts)
118 118 for abs in ctx.walk(m):
119 119 fctx = ctx[abs]
120 120 if not opts.get('text') and util.binary(fctx.data()):
121 121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
122 122 continue
123 123
124 124 lines = fctx.annotate(follow=opts.get('follow'),
125 125 linenumber=linenumber)
126 126 pieces = []
127 127
128 128 for f in funcmap:
129 129 l = [f(n) for n, dummy in lines]
130 130 if l:
131 131 ml = max(map(len, l))
132 132 pieces.append(["%*s" % (ml, x) for x in l])
133 133
134 134 if pieces:
135 135 for p, l in zip(zip(*pieces), lines):
136 136 ui.write("%s: %s" % (" ".join(p), l[1]))
137 137
138 138 def archive(ui, repo, dest, **opts):
139 139 '''create unversioned archive of a repository revision
140 140
141 141 By default, the revision used is the parent of the working
142 142 directory; use -r/--rev to specify a different revision.
143 143
144 144 To specify the type of archive to create, use -t/--type. Valid
145 145 types are:
146 146
147 147 "files" (default): a directory full of files
148 148 "tar": tar archive, uncompressed
149 149 "tbz2": tar archive, compressed using bzip2
150 150 "tgz": tar archive, compressed using gzip
151 151 "uzip": zip archive, uncompressed
152 152 "zip": zip archive, compressed using deflate
153 153
154 154 The exact name of the destination archive or directory is given
155 155 using a format string; see 'hg help export' for details.
156 156
157 157 Each member added to an archive file has a directory prefix
158 158 prepended. Use -p/--prefix to specify a format string for the
159 159 prefix. The default is the basename of the archive, with suffixes
160 160 removed.
161 161 '''
162 162
163 163 ctx = repo[opts.get('rev')]
164 164 if not ctx:
165 165 raise util.Abort(_('no working directory: please specify a revision'))
166 166 node = ctx.node()
167 167 dest = cmdutil.make_filename(repo, dest, node)
168 168 if os.path.realpath(dest) == repo.root:
169 169 raise util.Abort(_('repository root cannot be destination'))
170 170 matchfn = cmdutil.match(repo, [], opts)
171 171 kind = opts.get('type') or 'files'
172 172 prefix = opts.get('prefix')
173 173 if dest == '-':
174 174 if kind == 'files':
175 175 raise util.Abort(_('cannot archive plain files to stdout'))
176 176 dest = sys.stdout
177 177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
178 178 prefix = cmdutil.make_filename(repo, prefix, node)
179 179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
180 180 matchfn, prefix)
181 181
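# Assumed invocation for illustration: 'hg archive -t tgz -p "%b-%r/" snap.tgz'
# creates a gzipped tarball whose members are prefixed with the repository
# basename and zero-padded revision number, per the format rules above.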
182 182 def backout(ui, repo, node=None, rev=None, **opts):
183 183 '''reverse effect of earlier changeset
184 184
185 185 Commit the backed out changes as a new changeset. The new
186 186 changeset is a child of the backed out changeset.
187 187
188 188 If you back out a changeset other than the tip, a new head is
189 189 created. This head will be the new tip and you should merge this
190 190 backout changeset with another head (current one by default).
191 191
192 192 The --merge option remembers the parent of the working directory
193 193 before starting the backout, then merges the new head with that
194 194 changeset afterwards. This saves you from doing the merge by hand.
195 195 The result of this merge is not committed, as with a normal merge.
196 196
197 197 See 'hg help dates' for a list of formats valid for -d/--date.
198 198 '''
199 199 if rev and node:
200 200 raise util.Abort(_("please specify just one revision"))
201 201
202 202 if not rev:
203 203 rev = node
204 204
205 205 if not rev:
206 206 raise util.Abort(_("please specify a revision to backout"))
207 207
208 208 date = opts.get('date')
209 209 if date:
210 210 opts['date'] = util.parsedate(date)
211 211
212 212 cmdutil.bail_if_changed(repo)
213 213 node = repo.lookup(rev)
214 214
215 215 op1, op2 = repo.dirstate.parents()
216 216 a = repo.changelog.ancestor(op1, node)
217 217 if a != node:
218 218 raise util.Abort(_('cannot back out change on a different branch'))
219 219
220 220 p1, p2 = repo.changelog.parents(node)
221 221 if p1 == nullid:
222 222 raise util.Abort(_('cannot back out a change with no parents'))
223 223 if p2 != nullid:
224 224 if not opts.get('parent'):
225 225 raise util.Abort(_('cannot back out a merge changeset without '
226 226 '--parent'))
227 227 p = repo.lookup(opts['parent'])
228 228 if p not in (p1, p2):
229 229 raise util.Abort(_('%s is not a parent of %s') %
230 230 (short(p), short(node)))
231 231 parent = p
232 232 else:
233 233 if opts.get('parent'):
234 234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
235 235 parent = p1
236 236
237 237 # the backout should appear on the same branch
238 238 branch = repo.dirstate.branch()
239 239 hg.clean(repo, node, show_stats=False)
240 240 repo.dirstate.setbranch(branch)
241 241 revert_opts = opts.copy()
242 242 revert_opts['date'] = None
243 243 revert_opts['all'] = True
244 244 revert_opts['rev'] = hex(parent)
245 245 revert_opts['no_backup'] = None
246 246 revert(ui, repo, **revert_opts)
247 247 commit_opts = opts.copy()
248 248 commit_opts['addremove'] = False
249 249 if not commit_opts['message'] and not commit_opts['logfile']:
250 250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
251 251 commit_opts['force_editor'] = True
252 252 commit(ui, repo, **commit_opts)
253 253 def nice(node):
254 254 return '%d:%s' % (repo.changelog.rev(node), short(node))
255 255 ui.status(_('changeset %s backs out changeset %s\n') %
256 256 (nice(repo.changelog.tip()), nice(node)))
257 257 if op1 != node:
258 258 hg.clean(repo, op1, show_stats=False)
259 259 if opts.get('merge'):
260 260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
261 261 hg.merge(repo, hex(repo.changelog.tip()))
262 262 else:
263 263 ui.status(_('the backout changeset is a new head - '
264 264 'do not forget to merge\n'))
265 265 ui.status(_('(use "backout --merge" '
266 266 'if you want to auto-merge)\n'))
267 267
268 268 def bisect(ui, repo, rev=None, extra=None, command=None,
269 269 reset=None, good=None, bad=None, skip=None, noupdate=None):
270 270 """subdivision search of changesets
271 271
272 272 This command helps to find changesets which introduce problems. To
273 273 use, mark the earliest changeset you know exhibits the problem as
274 274 bad, then mark the latest changeset which is free from the problem
275 275 as good. Bisect will update your working directory to a revision
276 276 for testing (unless the -U/--noupdate option is specified). Once
277 277 you have performed tests, mark the working directory as bad or
278 278 good and bisect will either update to another candidate changeset
279 279 or announce that it has found the bad revision.
280 280
281 281 As a shortcut, you can also use the revision argument to mark a
282 282 revision as good or bad without checking it out first.
283 283
284 284 If you supply a command it will be used for automatic bisection.
285 285 Its exit status will be used to mark revisions as good or bad:
286 286 status 0 marks the revision as good, 125 skips it, 127 (command
287 287 not found) aborts the bisection, and any other non-zero status
288 288 marks the revision as bad.
289 289 """
290 290 def print_result(nodes, good):
291 291 displayer = cmdutil.show_changeset(ui, repo, {})
292 292 if len(nodes) == 1:
293 293 # narrowed it down to a single revision
294 294 if good:
295 295 ui.write(_("The first good revision is:\n"))
296 296 else:
297 297 ui.write(_("The first bad revision is:\n"))
298 298 displayer.show(repo[nodes[0]])
299 299 else:
300 300 # multiple possible revisions
301 301 if good:
302 302 ui.write(_("Due to skipped revisions, the first "
303 303 "good revision could be any of:\n"))
304 304 else:
305 305 ui.write(_("Due to skipped revisions, the first "
306 306 "bad revision could be any of:\n"))
307 307 for n in nodes:
308 308 displayer.show(repo[n])
309 309
310 310 def check_state(state, interactive=True):
311 311 if not state['good'] or not state['bad']:
312 312 if (good or bad or skip or reset) and interactive:
313 313 return
314 314 if not state['good']:
315 315 raise util.Abort(_('cannot bisect (no known good revisions)'))
316 316 else:
317 317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
318 318 return True
319 319
320 320 # backward compatibility
321 321 if rev in "good bad reset init".split():
322 322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
323 323 cmd, rev, extra = rev, extra, None
324 324 if cmd == "good":
325 325 good = True
326 326 elif cmd == "bad":
327 327 bad = True
328 328 else:
329 329 reset = True
330 330 elif extra or good + bad + skip + reset + bool(command) > 1:
331 331 raise util.Abort(_('incompatible arguments'))
332 332
333 333 if reset:
334 334 p = repo.join("bisect.state")
335 335 if os.path.exists(p):
336 336 os.unlink(p)
337 337 return
338 338
339 339 state = hbisect.load_state(repo)
340 340
341 341 if command:
342 342 commandpath = util.find_exe(command)
343 343 changesets = 1
344 344 try:
345 345 while changesets:
346 346 # update state
347 347 status = subprocess.call([commandpath])
348 348 if status == 125:
349 349 transition = "skip"
350 350 elif status == 0:
351 351 transition = "good"
352 352 # status < 0 means process was killed
353 353 elif status == 127:
354 354 raise util.Abort(_("failed to execute %s") % command)
355 355 elif status < 0:
356 356 raise util.Abort(_("%s killed") % command)
357 357 else:
358 358 transition = "bad"
359 359 node = repo.lookup(rev or '.')
360 360 state[transition].append(node)
361 361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
362 362 check_state(state, interactive=False)
363 363 # bisect
364 364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 365 # update to next check
366 366 cmdutil.bail_if_changed(repo)
367 367 hg.clean(repo, nodes[0], show_stats=False)
368 368 finally:
369 369 hbisect.save_state(repo, state)
370 370 return print_result(nodes, not status)
371 371
372 372 # update state
373 373 node = repo.lookup(rev or '.')
374 374 if good:
375 375 state['good'].append(node)
376 376 elif bad:
377 377 state['bad'].append(node)
378 378 elif skip:
379 379 state['skip'].append(node)
380 380
381 381 hbisect.save_state(repo, state)
382 382
383 383 if not check_state(state):
384 384 return
385 385
386 386 # actually bisect
387 387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 388 if changesets == 0:
389 389 print_result(nodes, good)
390 390 else:
391 391 assert len(nodes) == 1 # only a single node can be tested next
392 392 node = nodes[0]
393 393 # compute the approximate number of remaining tests
394 394 tests, size = 0, 2
395 395 while size <= changesets:
396 396 tests, size = tests + 1, size * 2
397 397 rev = repo.changelog.rev(node)
398 398 ui.write(_("Testing changeset %s:%s "
399 399 "(%s changesets remaining, ~%s tests)\n")
400 400 % (rev, short(node), changesets, tests))
401 401 if not noupdate:
402 402 cmdutil.bail_if_changed(repo)
403 403 return hg.clean(repo, node)
404 404
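# Hypothetical check script for 'hg bisect --command "python check.py"',
# following the exit-status convention documented above (names assumed):
#   import subprocess, sys
#   status = subprocess.call(['python', 'run-tests.py'])  # assumed test runner
#   sys.exit(status and 1 or 0)   # 0 = good, any other non-zero = bad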
405 405 def branch(ui, repo, label=None, **opts):
406 406 """set or show the current branch name
407 407
408 408 With no argument, show the current branch name. With one argument,
409 409 set the working directory branch name (the branch does not exist
410 410 in the repository until the next commit). It is recommended to use
411 411 the 'default' branch as your primary development branch.
412 412
413 413 Unless -f/--force is specified, branch will not let you set a
414 414 branch name that shadows an existing branch.
415 415
416 416 Use -C/--clean to reset the working directory branch to that of
417 417 the parent of the working directory, negating a previous branch
418 418 change.
419 419
420 420 Use the command 'hg update' to switch to an existing branch.
421 421 """
422 422
423 423 if opts.get('clean'):
424 424 label = repo[None].parents()[0].branch()
425 425 repo.dirstate.setbranch(label)
426 426 ui.status(_('reset working directory to branch %s\n') % label)
427 427 elif label:
428 428 if not opts.get('force') and label in repo.branchtags():
429 429 if label not in [p.branch() for p in repo.parents()]:
430 430 raise util.Abort(_('a branch of the same name already exists'
431 431 ' (use --force to override)'))
432 432 repo.dirstate.setbranch(encoding.fromlocal(label))
433 433 ui.status(_('marked working directory as branch %s\n') % label)
434 434 else:
435 435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
436 436
437 437 def branches(ui, repo, active=False):
438 438 """list repository named branches
439 439
440 440 List the repository's named branches, indicating which ones are
441 441 inactive. If active is specified, only show active branches.
442 442
443 443 A branch is considered active if it contains repository heads.
444 444
445 445 Use the command 'hg update' to switch to an existing branch.
446 446 """
447 447 hexfunc = ui.debugflag and hex or short
448 448 activebranches = [encoding.tolocal(repo[n].branch())
449 449 for n in repo.heads(closed=False)]
450 450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
451 451 for tag, node in repo.branchtags().items()],
452 452 reverse=True)
453 453
454 454 for isactive, node, tag in branches:
455 455 if (not active) or isactive:
456 456 if ui.quiet:
457 457 ui.write("%s\n" % tag)
458 458 else:
459 459 hn = repo.lookup(node)
460 460 if isactive:
461 461 notice = ''
462 462 elif hn not in repo.branchheads(tag, closed=False):
463 463 notice = ' (closed)'
464 464 else:
465 465 notice = ' (inactive)'
466 466 rev = str(node).rjust(31 - encoding.colwidth(tag))
467 467 data = tag, rev, hexfunc(hn), notice
468 468 ui.write("%s %s:%s%s\n" % data)
469 469
470 470 def bundle(ui, repo, fname, dest=None, **opts):
471 471 """create a changegroup file
472 472
473 473 Generate a compressed changegroup file collecting changesets not
474 474 known to be in another repository.
475 475
476 476 If no destination repository is specified the destination is
477 477 assumed to have all the nodes specified by one or more --base
478 478 parameters. To create a bundle containing all changesets, use
479 479 -a/--all (or --base null). To change the compression method
480 480 applied, use the -t/--type option (by default, bundles are
481 481 compressed using bz2).
482 482
483 483 The bundle file can then be transferred using conventional means
484 484 and applied to another repository with the unbundle or pull
485 485 command. This is useful when direct push and pull are not
486 486 available or when exporting an entire repository is undesirable.
487 487
488 488 Applying bundles preserves all changeset contents including
489 489 permissions, copy/rename information, and revision history.
490 490 """
491 491 revs = opts.get('rev') or None
492 492 if revs:
493 493 revs = [repo.lookup(rev) for rev in revs]
494 494 if opts.get('all'):
495 495 base = ['null']
496 496 else:
497 497 base = opts.get('base')
498 498 if base:
499 499 if dest:
500 500 raise util.Abort(_("--base is incompatible with specifying "
501 501 "a destination"))
502 502 base = [repo.lookup(rev) for rev in base]
503 503 # create the right base
504 504 # XXX: nodesbetween / changegroup* should be "fixed" instead
505 505 o = []
506 506 has = {nullid: None}
507 507 for n in base:
508 508 has.update(repo.changelog.reachable(n))
509 509 if revs:
510 510 visit = list(revs)
511 511 else:
512 512 visit = repo.changelog.heads()
513 513 seen = {}
514 514 while visit:
515 515 n = visit.pop(0)
516 516 parents = [p for p in repo.changelog.parents(n) if p not in has]
517 517 if len(parents) == 0:
518 518 o.insert(0, n)
519 519 else:
520 520 for p in parents:
521 521 if p not in seen:
522 522 seen[p] = 1
523 523 visit.append(p)
524 524 else:
525 525 dest, revs, checkout = hg.parseurl(
526 526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
527 527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
528 528 o = repo.findoutgoing(other, force=opts.get('force'))
529 529
530 530 if revs:
531 531 cg = repo.changegroupsubset(o, revs, 'bundle')
532 532 else:
533 533 cg = repo.changegroup(o, 'bundle')
534 534
535 535 bundletype = opts.get('type', 'bzip2').lower()
536 536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
537 537 bundletype = btypes.get(bundletype)
538 538 if bundletype not in changegroup.bundletypes:
539 539 raise util.Abort(_('unknown bundle type specified with --type'))
540 540
541 541 changegroup.writebundle(cg, fname, bundletype)
542 542
543 543 def cat(ui, repo, file1, *pats, **opts):
544 544 """output the current or given revision of files
545 545
546 546 Print the specified files as they were at the given revision. If
547 547 no revision is given, the parent of the working directory is used,
548 548 or tip if no revision is checked out.
549 549
550 550 Output may be to a file, in which case the name of the file is
551 551 given using a format string. The formatting rules are the same as
552 552 for the export command, with the following additions:
553 553
554 554 %s basename of file being printed
555 555 %d dirname of file being printed, or '.' if in repository root
556 556 %p root-relative path name of file being printed
557 557 """
558 558 ctx = repo[opts.get('rev')]
559 559 err = 1
560 560 m = cmdutil.match(repo, (file1,) + pats, opts)
561 561 for abs in ctx.walk(m):
562 562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
563 563 data = ctx[abs].data()
564 564 if opts.get('decode'):
565 565 data = repo.wwritedata(abs, data)
566 566 fp.write(data)
567 567 err = 0
568 568 return err
569 569
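# Assumed invocation for illustration: 'hg cat -r 1.0 -o "%p.orig" somefile'
# writes the requested revision to a file named after the root-relative path,
# using the %p rule described above.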
570 570 def clone(ui, source, dest=None, **opts):
571 571 """make a copy of an existing repository
572 572
573 573 Create a copy of an existing repository in a new directory.
574 574
575 575 If no destination directory name is specified, it defaults to the
576 576 basename of the source.
577 577
578 578 The location of the source is added to the new repository's
579 579 .hg/hgrc file, as the default to be used for future pulls.
580 580
581 581 If you use the -r/--rev option to clone up to a specific revision,
582 582 no subsequent revisions (including subsequent tags) will be
583 583 present in the cloned repository. This option implies --pull, even
584 584 on local repositories.
585 585
586 586 By default, clone will check out the head of the 'default' branch.
587 587 If the -U/--noupdate option is used, the new clone will contain
588 588 only a repository (.hg) and no working copy (the working copy
589 589 parent is the null revision).
590 590
591 591 See 'hg help urls' for valid source format details.
592 592
593 593 It is possible to specify an ssh:// URL as the destination, but no
594 594 .hg/hgrc file or working directory will be created on the remote side.
595 595 Look at the help text for URLs for important details about ssh://
596 596 URLs.
597 597
598 598 For efficiency, hardlinks are used for cloning whenever the source
599 599 and destination are on the same filesystem (note this applies only
600 600 to the repository data, not to the checked out files). Some
601 601 filesystems, such as AFS, implement hardlinking incorrectly, but
602 602 do not report errors. In these cases, use the --pull option to
603 603 avoid hardlinking.
604 604
605 605 In some cases, you can clone repositories and checked out files
606 606 using full hardlinks with
607 607
608 608 $ cp -al REPO REPOCLONE
609 609
610 610 This is the fastest way to clone, but it is not always safe. The
611 611 operation is not atomic (making sure REPO is not modified during
612 612 the operation is up to you) and you have to make sure your editor
613 613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
614 614 this is not compatible with certain extensions that place their
615 615 metadata under the .hg directory, such as mq.
616 616
617 617 """
618 618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
619 619 pull=opts.get('pull'),
620 620 stream=opts.get('uncompressed'),
621 621 rev=opts.get('rev'),
622 622 update=not opts.get('noupdate'))
623 623
624 624 def commit(ui, repo, *pats, **opts):
625 625 """commit the specified files or all outstanding changes
626 626
627 627 Commit changes to the given files into the repository. Unlike a
628 628 centralized RCS, this is a local operation. See hg push
629 629 for means to actively distribute your changes.
630 630
631 631 If a list of files is omitted, all changes reported by "hg status"
632 632 will be committed.
633 633
634 634 If you are committing the result of a merge, do not provide any
635 635 file names or -I/-X filters.
636 636
637 637 If no commit message is specified, the configured editor is
638 638 started to prompt you for a message.
639 639
640 640 See 'hg help dates' for a list of formats valid for -d/--date.
641 641 """
642 642 extra = {}
643 643 if opts.get('close_branch'):
644 644 extra['close'] = 1
645 e = cmdutil.commiteditor
646 if opts.get('force_editor'):
647 e = cmdutil.commitforceeditor
648
645 649 def commitfunc(ui, repo, message, match, opts):
646 650 return repo.commit(match.files(), message, opts.get('user'),
647 opts.get('date'), match, force_editor=opts.get('force_editor'),
648 extra=extra)
651 opts.get('date'), match, editor=e, extra=extra)
649 652
650 653 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
651 654 if not node:
652 655 return
653 656 cl = repo.changelog
654 657 rev = cl.rev(node)
655 658 parents = cl.parentrevs(rev)
656 659 if rev - 1 in parents:
657 660 # one of the parents was the old tip
658 661 pass
659 662 elif (parents == (nullrev, nullrev) or
660 663 len(cl.heads(cl.node(parents[0]))) > 1 and
661 664 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
662 665 ui.status(_('created new head\n'))
663 666
664 667 if ui.debugflag:
665 668 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
666 669 elif ui.verbose:
667 670 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
668 671
669 672 def copy(ui, repo, *pats, **opts):
670 673 """mark files as copied for the next commit
671 674
672 675 Mark dest as having copies of source files. If dest is a
673 676 directory, copies are put in that directory. If dest is a file,
674 677 the source must be a single file.
675 678
676 679 By default, this command copies the contents of files as they
677 680 stand in the working directory. If invoked with -A/--after, the
678 681 operation is recorded, but no copying is performed.
679 682
680 683 This command takes effect with the next commit. To undo a copy
681 684 before that, see hg revert.
682 685 """
683 686 wlock = repo.wlock(False)
684 687 try:
685 688 return cmdutil.copy(ui, repo, pats, opts)
686 689 finally:
687 690 wlock.release()
688 691
689 692 def debugancestor(ui, repo, *args):
690 693 """find the ancestor revision of two revisions in a given index"""
691 694 if len(args) == 3:
692 695 index, rev1, rev2 = args
693 696 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
694 697 lookup = r.lookup
695 698 elif len(args) == 2:
696 699 if not repo:
697 700 raise util.Abort(_("There is no Mercurial repository here "
698 701 "(.hg not found)"))
699 702 rev1, rev2 = args
700 703 r = repo.changelog
701 704 lookup = repo.lookup
702 705 else:
703 706 raise util.Abort(_('either two or three arguments required'))
704 707 a = r.ancestor(lookup(rev1), lookup(rev2))
705 708 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
706 709
707 710 def debugcommands(ui, cmd='', *args):
708 711 for cmd, vals in sorted(table.iteritems()):
709 712 cmd = cmd.split('|')[0].strip('^')
710 713 opts = ', '.join([i[1] for i in vals[1]])
711 714 ui.write('%s: %s\n' % (cmd, opts))
712 715
713 716 def debugcomplete(ui, cmd='', **opts):
714 717 """returns the completion list associated with the given command"""
715 718
716 719 if opts.get('options'):
717 720 options = []
718 721 otables = [globalopts]
719 722 if cmd:
720 723 aliases, entry = cmdutil.findcmd(cmd, table, False)
721 724 otables.append(entry[1])
722 725 for t in otables:
723 726 for o in t:
724 727 if o[0]:
725 728 options.append('-%s' % o[0])
726 729 options.append('--%s' % o[1])
727 730 ui.write("%s\n" % "\n".join(options))
728 731 return
729 732
730 733 cmdlist = cmdutil.findpossible(cmd, table)
731 734 if ui.verbose:
732 735 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
733 736 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
734 737
735 738 def debugfsinfo(ui, path = "."):
736 739 file('.debugfsinfo', 'w').write('')
737 740 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
738 741 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
739 742 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
740 743 and 'yes' or 'no'))
741 744 os.unlink('.debugfsinfo')
742 745
743 746 def debugrebuildstate(ui, repo, rev="tip"):
744 747 """rebuild the dirstate as it would look like for the given revision"""
745 748 ctx = repo[rev]
746 749 wlock = repo.wlock()
747 750 try:
748 751 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
749 752 finally:
750 753 wlock.release()
751 754
752 755 def debugcheckstate(ui, repo):
753 756 """validate the correctness of the current dirstate"""
754 757 parent1, parent2 = repo.dirstate.parents()
755 758 m1 = repo[parent1].manifest()
756 759 m2 = repo[parent2].manifest()
757 760 errors = 0
758 761 for f in repo.dirstate:
759 762 state = repo.dirstate[f]
760 763 if state in "nr" and f not in m1:
761 764 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
762 765 errors += 1
763 766 if state in "a" and f in m1:
764 767 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
765 768 errors += 1
766 769 if state in "m" and f not in m1 and f not in m2:
767 770 ui.warn(_("%s in state %s, but not in either manifest\n") %
768 771 (f, state))
769 772 errors += 1
770 773 for f in m1:
771 774 state = repo.dirstate[f]
772 775 if state not in "nrm":
773 776 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
774 777 errors += 1
775 778 if errors:
776 779 error = _(".hg/dirstate inconsistent with current parent's manifest")
777 780 raise util.Abort(error)
778 781
779 782 def showconfig(ui, repo, *values, **opts):
780 783 """show combined config settings from all hgrc files
781 784
782 785 With no args, print names and values of all config items.
783 786
784 787 With one arg of the form section.name, print just the value of
785 788 that config item.
786 789
787 790 With multiple args, print names and values of all config items
788 791 with matching section names.
789 792
790 793 With the --debug flag, the source (filename and line number) is
791 794 printed for each config item.
792 795 """
793 796
794 797 untrusted = bool(opts.get('untrusted'))
795 798 if values:
796 799 if len([v for v in values if '.' in v]) > 1:
797 800 raise util.Abort(_('only one config item permitted'))
798 801 for section, name, value in ui.walkconfig(untrusted=untrusted):
799 802 sectname = section + '.' + name
800 803 if values:
801 804 for v in values:
802 805 if v == section:
803 806 ui.debug('%s: ' %
804 807 ui.configsource(section, name, untrusted))
805 808 ui.write('%s=%s\n' % (sectname, value))
806 809 elif v == sectname:
807 810 ui.debug('%s: ' %
808 811 ui.configsource(section, name, untrusted))
809 812 ui.write(value, '\n')
810 813 else:
811 814 ui.debug('%s: ' %
812 815 ui.configsource(section, name, untrusted))
813 816 ui.write('%s=%s\n' % (sectname, value))
814 817
815 818 def debugsetparents(ui, repo, rev1, rev2=None):
816 819 """manually set the parents of the current working directory
817 820
818 821 This is useful for writing repository conversion tools, but should
819 822 be used with care.
820 823 """
821 824
822 825 if not rev2:
823 826 rev2 = hex(nullid)
824 827
825 828 wlock = repo.wlock()
826 829 try:
827 830 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
828 831 finally:
829 832 wlock.release()
830 833
831 834 def debugstate(ui, repo, nodates=None):
832 835 """show the contents of the current dirstate"""
833 836 timestr = ""
834 837 showdate = not nodates
835 838 for file_, ent in sorted(repo.dirstate._map.iteritems()):
836 839 if showdate:
837 840 if ent[3] == -1:
838 841 # Pad or slice to locale representation
839 842 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
840 843 timestr = 'unset'
841 844 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
842 845 else:
843 846 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
844 847 if ent[1] & 020000:
845 848 mode = 'lnk'
846 849 else:
847 850 mode = '%3o' % (ent[1] & 0777)
848 851 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
849 852 for f in repo.dirstate.copies():
850 853 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
851 854
852 855 def debugdata(ui, file_, rev):
853 856 """dump the contents of a data file revision"""
854 857 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
855 858 try:
856 859 ui.write(r.revision(r.lookup(rev)))
857 860 except KeyError:
858 861 raise util.Abort(_('invalid revision identifier %s') % rev)
859 862
860 863 def debugdate(ui, date, range=None, **opts):
861 864 """parse and display a date"""
862 865 if opts["extended"]:
863 866 d = util.parsedate(date, util.extendeddateformats)
864 867 else:
865 868 d = util.parsedate(date)
866 869 ui.write("internal: %s %s\n" % d)
867 870 ui.write("standard: %s\n" % util.datestr(d))
868 871 if range:
869 872 m = util.matchdate(range)
870 873 ui.write("match: %s\n" % m(d[0]))
871 874
872 875 def debugindex(ui, file_):
873 876 """dump the contents of an index file"""
874 877 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
875 878 ui.write(" rev offset length base linkrev"
876 879 " nodeid p1 p2\n")
877 880 for i in r:
878 881 node = r.node(i)
879 882 try:
880 883 pp = r.parents(node)
881 884 except:
882 885 pp = [nullid, nullid]
883 886 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
884 887 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
885 888 short(node), short(pp[0]), short(pp[1])))
886 889
887 890 def debugindexdot(ui, file_):
888 891 """dump an index DAG as a .dot file"""
889 892 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
890 893 ui.write("digraph G {\n")
891 894 for i in r:
892 895 node = r.node(i)
893 896 pp = r.parents(node)
894 897 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
895 898 if pp[1] != nullid:
896 899 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
897 900 ui.write("}\n")
898 901
899 902 def debuginstall(ui):
900 903 '''test Mercurial installation'''
901 904
902 905 def writetemp(contents):
903 906 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
904 907 f = os.fdopen(fd, "wb")
905 908 f.write(contents)
906 909 f.close()
907 910 return name
908 911
909 912 problems = 0
910 913
911 914 # encoding
912 915 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
913 916 try:
914 917 encoding.fromlocal("test")
915 918 except util.Abort, inst:
916 919 ui.write(" %s\n" % inst)
917 920 ui.write(_(" (check that your locale is properly set)\n"))
918 921 problems += 1
919 922
920 923 # compiled modules
921 924 ui.status(_("Checking extensions...\n"))
922 925 try:
923 926 import bdiff, mpatch, base85
924 927 except Exception, inst:
925 928 ui.write(" %s\n" % inst)
926 929 ui.write(_(" One or more extensions could not be found"))
927 930 ui.write(_(" (check that you compiled the extensions)\n"))
928 931 problems += 1
929 932
930 933 # templates
931 934 ui.status(_("Checking templates...\n"))
932 935 try:
933 936 import templater
934 937 templater.templater(templater.templatepath("map-cmdline.default"))
935 938 except Exception, inst:
936 939 ui.write(" %s\n" % inst)
937 940 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
938 941 problems += 1
939 942
940 943 # patch
941 944 ui.status(_("Checking patch...\n"))
942 945 patchproblems = 0
943 946 a = "1\n2\n3\n4\n"
944 947 b = "1\n2\n3\ninsert\n4\n"
945 948 fa = writetemp(a)
946 949 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
947 950 os.path.basename(fa))
948 951 fd = writetemp(d)
949 952
950 953 files = {}
951 954 try:
952 955 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
953 956 except util.Abort, e:
954 957 ui.write(_(" patch call failed:\n"))
955 958 ui.write(" " + str(e) + "\n")
956 959 patchproblems += 1
957 960 else:
958 961 if list(files) != [os.path.basename(fa)]:
959 962 ui.write(_(" unexpected patch output!\n"))
960 963 patchproblems += 1
961 964 a = file(fa).read()
962 965 if a != b:
963 966 ui.write(_(" patch test failed!\n"))
964 967 patchproblems += 1
965 968
966 969 if patchproblems:
967 970 if ui.config('ui', 'patch'):
968 971 ui.write(_(" (Current patch tool may be incompatible with patch,"
969 972 " or misconfigured. Please check your .hgrc file)\n"))
970 973 else:
971 974 ui.write(_(" Internal patcher failure, please report this error"
972 975 " to http://www.selenic.com/mercurial/bts\n"))
973 976 problems += patchproblems
974 977
975 978 os.unlink(fa)
976 979 os.unlink(fd)
977 980
978 981 # editor
979 982 ui.status(_("Checking commit editor...\n"))
980 983 editor = ui.geteditor()
981 984 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
982 985 if not cmdpath:
983 986 if editor == 'vi':
984 987 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
985 988 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
986 989 else:
987 990 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
988 991 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
989 992 problems += 1
990 993
991 994 # check username
992 995 ui.status(_("Checking username...\n"))
993 996 user = os.environ.get("HGUSER")
994 997 if user is None:
995 998 user = ui.config("ui", "username")
996 999 if user is None:
997 1000 user = os.environ.get("EMAIL")
998 1001 if not user:
999 1002 ui.warn(" ")
1000 1003 ui.username()
1001 1004 ui.write(_(" (specify a username in your .hgrc file)\n"))
1002 1005
1003 1006 if not problems:
1004 1007 ui.status(_("No problems detected\n"))
1005 1008 else:
1006 1009 ui.write(_("%s problems detected,"
1007 1010 " please check your install!\n") % problems)
1008 1011
1009 1012 return problems
1010 1013
1011 1014 def debugrename(ui, repo, file1, *pats, **opts):
1012 1015 """dump rename information"""
1013 1016
1014 1017 ctx = repo[opts.get('rev')]
1015 1018 m = cmdutil.match(repo, (file1,) + pats, opts)
1016 1019 for abs in ctx.walk(m):
1017 1020 fctx = ctx[abs]
1018 1021 o = fctx.filelog().renamed(fctx.filenode())
1019 1022 rel = m.rel(abs)
1020 1023 if o:
1021 1024 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1022 1025 else:
1023 1026 ui.write(_("%s not renamed\n") % rel)
1024 1027
1025 1028 def debugwalk(ui, repo, *pats, **opts):
1026 1029 """show how files match on given patterns"""
1027 1030 m = cmdutil.match(repo, pats, opts)
1028 1031 items = list(repo.walk(m))
1029 1032 if not items:
1030 1033 return
1031 1034 fmt = 'f %%-%ds %%-%ds %%s' % (
1032 1035 max([len(abs) for abs in items]),
1033 1036 max([len(m.rel(abs)) for abs in items]))
1034 1037 for abs in items:
1035 1038 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1036 1039 ui.write("%s\n" % line.rstrip())
1037 1040
1038 1041 def diff(ui, repo, *pats, **opts):
1039 1042 """diff repository (or selected files)
1040 1043
1041 1044 Show differences between revisions for the specified files.
1042 1045
1043 1046 Differences between files are shown using the unified diff format.
1044 1047
1045 1048 NOTE: diff may generate unexpected results for merges, as it will
1046 1049 default to comparing against the working directory's first parent
1047 1050 changeset if no revisions are specified.
1048 1051
1049 1052 When two revision arguments are given, then changes are shown
1050 1053 between those revisions. If only one revision is specified then
1051 1054 that revision is compared to the working directory, and, when no
1052 1055 revisions are specified, the working directory files are compared
1053 1056 to its parent.
1054 1057
1055 1058 Without the -a/--text option, diff will avoid generating diffs of
1056 1059 files it detects as binary. With -a, diff will generate a diff
1057 1060 anyway, probably with undesirable results.
1058 1061
1059 1062 Use the -g/--git option to generate diffs in the git extended diff
1060 1063 format. For more information, read 'hg help diffs'.
1061 1064 """
1062 1065
1063 1066 revs = opts.get('rev')
1064 1067 change = opts.get('change')
1065 1068
1066 1069 if revs and change:
1067 1070 msg = _('cannot specify --rev and --change at the same time')
1068 1071 raise util.Abort(msg)
1069 1072 elif change:
1070 1073 node2 = repo.lookup(change)
1071 1074 node1 = repo[node2].parents()[0].node()
1072 1075 else:
1073 1076 node1, node2 = cmdutil.revpair(repo, revs)
1074 1077
1075 1078 m = cmdutil.match(repo, pats, opts)
1076 1079 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1077 1080 for chunk in it:
1078 1081 repo.ui.write(chunk)
1079 1082
1080 1083 def export(ui, repo, *changesets, **opts):
1081 1084 """dump the header and diffs for one or more changesets
1082 1085
1083 1086 Print the changeset header and diffs for one or more revisions.
1084 1087
1085 1088 The information shown in the changeset header is: author,
1086 1089 changeset hash, parent(s) and commit comment.
1087 1090
1088 1091 NOTE: export may generate unexpected diff output for merge
1089 1092 changesets, as it will compare the merge changeset against its
1090 1093 first parent only.
1091 1094
1092 1095 Output may be to a file, in which case the name of the file is
1093 1096 given using a format string. The formatting rules are as follows:
1094 1097
1095 1098 %% literal "%" character
1096 1099 %H changeset hash (40 bytes of hexadecimal)
1097 1100 %N number of patches being generated
1098 1101 %R changeset revision number
1099 1102 %b basename of the exporting repository
1100 1103 %h short-form changeset hash (12 bytes of hexadecimal)
1101 1104 %n zero-padded sequence number, starting at 1
1102 1105 %r zero-padded changeset revision number
1103 1106
1104 1107 Without the -a/--text option, export will avoid generating diffs
1105 1108 of files it detects as binary. With -a, export will generate a
1106 1109 diff anyway, probably with undesirable results.
1107 1110
1108 1111 Use the -g/--git option to generate diffs in the git extended diff
1109 1112 format. Read the diffs help topic for more information.
1110 1113
1111 1114 With the --switch-parent option, the diff will be against the
1112 1115 second parent. It can be useful to review a merge.
1113 1116 """
1114 1117 if not changesets:
1115 1118 raise util.Abort(_("export requires at least one changeset"))
1116 1119 revs = cmdutil.revrange(repo, changesets)
1117 1120 if len(revs) > 1:
1118 1121 ui.note(_('exporting patches:\n'))
1119 1122 else:
1120 1123 ui.note(_('exporting patch:\n'))
1121 1124 patch.export(repo, revs, template=opts.get('output'),
1122 1125 switch_parent=opts.get('switch_parent'),
1123 1126 opts=patch.diffopts(ui, opts))
1124 1127
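# Illustrative sketch only: a rough, standalone approximation of how the
# -o/--output %-codes listed in the export help text could expand. The sample
# values and the zero-padding widths are hypothetical; the real expansion is
# performed by patch.export.
import re

def expand_output_name(fmt, basename, node_hex, rev, seqno, total):
    table = {'%': '%', 'H': node_hex, 'N': str(total), 'R': str(rev),
             'b': basename, 'h': node_hex[:12],
             'n': '%02d' % seqno, 'r': '%03d' % rev}
    # replace each %-code with its value, leaving unknown codes untouched
    return re.sub(r'%(.)', lambda m: table.get(m.group(1), m.group(0)), fmt)

# expand_output_name('%b-%h-%n.patch', 'myrepo', 'a' * 40, 5, 1, 3)
# -> 'myrepo-aaaaaaaaaaaa-01.patch'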
1125 1128 def grep(ui, repo, pattern, *pats, **opts):
1126 1129 """search for a pattern in specified files and revisions
1127 1130
1128 1131 Search revisions of files for a regular expression.
1129 1132
1130 1133 This command behaves differently than Unix grep. It only accepts
1131 1134 Python/Perl regexps. It searches repository history, not the
1132 1135 working directory. It always prints the revision number in which a
1133 1136 match appears.
1134 1137
1135 1138 By default, grep only prints output for the first revision of a
1136 1139 file in which it finds a match. To get it to print every revision
1137 1140 that contains a change in match status ("-" for a match that
1138 1141 becomes a non-match, or "+" for a non-match that becomes a match),
1139 1142 use the --all flag.
1140 1143 """
1141 1144 reflags = 0
1142 1145 if opts.get('ignore_case'):
1143 1146 reflags |= re.I
1144 1147 try:
1145 1148 regexp = re.compile(pattern, reflags)
1146 1149 except Exception, inst:
1147 1150 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1148 1151 return None
1149 1152 sep, eol = ':', '\n'
1150 1153 if opts.get('print0'):
1151 1154 sep = eol = '\0'
1152 1155
1153 1156 fcache = {}
1154 1157 def getfile(fn):
1155 1158 if fn not in fcache:
1156 1159 fcache[fn] = repo.file(fn)
1157 1160 return fcache[fn]
1158 1161
1159 1162 def matchlines(body):
1160 1163 begin = 0
1161 1164 linenum = 0
1162 1165 while True:
1163 1166 match = regexp.search(body, begin)
1164 1167 if not match:
1165 1168 break
1166 1169 mstart, mend = match.span()
1167 1170 linenum += body.count('\n', begin, mstart) + 1
1168 1171 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1169 1172 begin = body.find('\n', mend) + 1 or len(body)
1170 1173 lend = begin - 1
1171 1174 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1172 1175
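# Illustrative walk-through of matchlines above (hypothetical input): for
# body = "foo\nbar foo\n" and the pattern "foo", regexp.search(body, begin)
# resumes after each matched line and the running '\n' count gives the line
# number, so the generator yields (1, 0, 3, 'foo') and then (2, 4, 7,
# 'bar foo'): line number, match start and end relative to the line start,
# and the line text.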
1173 1176 class linestate(object):
1174 1177 def __init__(self, line, linenum, colstart, colend):
1175 1178 self.line = line
1176 1179 self.linenum = linenum
1177 1180 self.colstart = colstart
1178 1181 self.colend = colend
1179 1182
1180 1183 def __hash__(self):
1181 1184 return hash((self.linenum, self.line))
1182 1185
1183 1186 def __eq__(self, other):
1184 1187 return self.line == other.line
1185 1188
1186 1189 matches = {}
1187 1190 copies = {}
1188 1191 def grepbody(fn, rev, body):
1189 1192 matches[rev].setdefault(fn, [])
1190 1193 m = matches[rev][fn]
1191 1194 for lnum, cstart, cend, line in matchlines(body):
1192 1195 s = linestate(line, lnum, cstart, cend)
1193 1196 m.append(s)
1194 1197
1195 1198 def difflinestates(a, b):
1196 1199 sm = difflib.SequenceMatcher(None, a, b)
1197 1200 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1198 1201 if tag == 'insert':
1199 1202 for i in xrange(blo, bhi):
1200 1203 yield ('+', b[i])
1201 1204 elif tag == 'delete':
1202 1205 for i in xrange(alo, ahi):
1203 1206 yield ('-', a[i])
1204 1207 elif tag == 'replace':
1205 1208 for i in xrange(alo, ahi):
1206 1209 yield ('-', a[i])
1207 1210 for i in xrange(blo, bhi):
1208 1211 yield ('+', b[i])
1209 1212
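# Illustrative note on difflinestates above: it maps difflib.SequenceMatcher
# opcodes onto the "+"/"-" markers printed by --all. For the hypothetical
# inputs a = ['x', 'y'] and b = ['x', 'z'] the matcher reports an 'equal'
# block followed by a 'replace' block, so the generator yields ('-', 'y')
# and then ('+', 'z').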
1210 1213 prev = {}
1211 1214 def display(fn, rev, states, prevstates):
1212 1215 datefunc = ui.quiet and util.shortdate or util.datestr
1213 1216 found = False
1214 1217 filerevmatches = {}
1215 1218 r = prev.get(fn, -1)
1216 1219 if opts.get('all'):
1217 1220 iter = difflinestates(states, prevstates)
1218 1221 else:
1219 1222 iter = [('', l) for l in prevstates]
1220 1223 for change, l in iter:
1221 1224 cols = [fn, str(r)]
1222 1225 if opts.get('line_number'):
1223 1226 cols.append(str(l.linenum))
1224 1227 if opts.get('all'):
1225 1228 cols.append(change)
1226 1229 if opts.get('user'):
1227 1230 cols.append(ui.shortuser(get(r)[1]))
1228 1231 if opts.get('date'):
1229 1232 cols.append(datefunc(get(r)[2]))
1230 1233 if opts.get('files_with_matches'):
1231 1234 c = (fn, r)
1232 1235 if c in filerevmatches:
1233 1236 continue
1234 1237 filerevmatches[c] = 1
1235 1238 else:
1236 1239 cols.append(l.line)
1237 1240 ui.write(sep.join(cols), eol)
1238 1241 found = True
1239 1242 return found
1240 1243
1241 1244 fstate = {}
1242 1245 skip = {}
1243 1246 get = util.cachefunc(lambda r: repo[r].changeset())
1244 1247 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1245 1248 found = False
1246 1249 follow = opts.get('follow')
1247 1250 for st, rev, fns in changeiter:
1248 1251 if st == 'window':
1249 1252 matches.clear()
1250 1253 elif st == 'add':
1251 1254 ctx = repo[rev]
1252 1255 matches[rev] = {}
1253 1256 for fn in fns:
1254 1257 if fn in skip:
1255 1258 continue
1256 1259 try:
1257 1260 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1258 1261 fstate.setdefault(fn, [])
1259 1262 if follow:
1260 1263 copied = getfile(fn).renamed(ctx.filenode(fn))
1261 1264 if copied:
1262 1265 copies.setdefault(rev, {})[fn] = copied[0]
1263 1266 except error.LookupError:
1264 1267 pass
1265 1268 elif st == 'iter':
1266 1269 for fn, m in sorted(matches[rev].items()):
1267 1270 copy = copies.get(rev, {}).get(fn)
1268 1271 if fn in skip:
1269 1272 if copy:
1270 1273 skip[copy] = True
1271 1274 continue
1272 1275 if fn in prev or fstate[fn]:
1273 1276 r = display(fn, rev, m, fstate[fn])
1274 1277 found = found or r
1275 1278 if r and not opts.get('all'):
1276 1279 skip[fn] = True
1277 1280 if copy:
1278 1281 skip[copy] = True
1279 1282 fstate[fn] = m
1280 1283 if copy:
1281 1284 fstate[copy] = m
1282 1285 prev[fn] = rev
1283 1286
1284 1287 for fn, state in sorted(fstate.items()):
1285 1288 if fn in skip:
1286 1289 continue
1287 1290 if fn not in copies.get(prev[fn], {}):
1288 1291 found = display(fn, rev, {}, state) or found
1289 1292 return (not found and 1) or 0
1290 1293
1291 1294 def heads(ui, repo, *branchrevs, **opts):
1292 1295 """show current repository heads or show branch heads
1293 1296
1294 1297 With no arguments, show all repository head changesets.
1295 1298
1296 1299 If branch or revision names are given, this will show the heads of
1297 1300 the specified branches or the branches those revisions are tagged
1298 1301 with.
1299 1302
1300 1303 Repository "heads" are changesets that don't have child
1301 1304 changesets. They are where development generally takes place and
1302 1305 are the usual targets for update and merge operations.
1303 1306
1304 1307 Branch heads are changesets that have a given branch tag, but have
1305 1308 no child changesets with that tag. They are usually where
1306 1309 development on the given branch takes place.
1307 1310 """
1308 1311 if opts.get('rev'):
1309 1312 start = repo.lookup(opts['rev'])
1310 1313 else:
1311 1314 start = None
1312 1315 closed = not opts.get('active')
1313 1316 if not branchrevs:
1314 1317 # Assume we're looking for repo-wide heads if no revs were specified.
1315 1318 heads = repo.heads(start, closed=closed)
1316 1319 else:
1317 1320 heads = []
1318 1321 visitedset = set()
1319 1322 for branchrev in branchrevs:
1320 1323 branch = repo[branchrev].branch()
1321 1324 if branch in visitedset:
1322 1325 continue
1323 1326 visitedset.add(branch)
1324 1327 bheads = repo.branchheads(branch, start, closed=closed)
1325 1328 if not bheads:
1326 1329 if branch != branchrev:
1327 1330 ui.warn(_("no changes on branch %s containing %s are "
1328 1331 "reachable from %s\n")
1329 1332 % (branch, branchrev, opts.get('rev')))
1330 1333 else:
1331 1334 ui.warn(_("no changes on branch %s are reachable from %s\n")
1332 1335 % (branch, opts.get('rev')))
1333 1336 heads.extend(bheads)
1334 1337 if not heads:
1335 1338 return 1
1336 1339 displayer = cmdutil.show_changeset(ui, repo, opts)
1337 1340 for n in heads:
1338 1341 displayer.show(repo[n])
1339 1342
1340 1343 def help_(ui, name=None, with_version=False):
1341 1344 """show help for a given topic or a help overview
1342 1345
1343 1346 With no arguments, print a list of commands and short help.
1344 1347
1345 1348 Given a topic, extension, or command name, print help for that
1346 1349 topic."""
1347 1350 option_lists = []
1348 1351
1349 1352 def addglobalopts(aliases):
1350 1353 if ui.verbose:
1351 1354 option_lists.append((_("global options:"), globalopts))
1352 1355 if name == 'shortlist':
1353 1356 option_lists.append((_('use "hg help" for the full list '
1354 1357 'of commands'), ()))
1355 1358 else:
1356 1359 if name == 'shortlist':
1357 1360 msg = _('use "hg help" for the full list of commands '
1358 1361 'or "hg -v" for details')
1359 1362 elif aliases:
1360 1363 msg = _('use "hg -v help%s" to show aliases and '
1361 1364 'global options') % (name and " " + name or "")
1362 1365 else:
1363 1366 msg = _('use "hg -v help %s" to show global options') % name
1364 1367 option_lists.append((msg, ()))
1365 1368
1366 1369 def helpcmd(name):
1367 1370 if with_version:
1368 1371 version_(ui)
1369 1372 ui.write('\n')
1370 1373
1371 1374 try:
1372 1375 aliases, i = cmdutil.findcmd(name, table, False)
1373 1376 except error.AmbiguousCommand, inst:
1374 1377 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1375 1378 helplist(_('list of commands:\n\n'), select)
1376 1379 return
1377 1380
1378 1381 # synopsis
1379 1382 if len(i) > 2:
1380 1383 if i[2].startswith('hg'):
1381 1384 ui.write("%s\n" % i[2])
1382 1385 else:
1383 1386 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1384 1387 else:
1385 1388 ui.write('hg %s\n' % aliases[0])
1386 1389
1387 1390 # aliases
1388 1391 if not ui.quiet and len(aliases) > 1:
1389 1392 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1390 1393
1391 1394 # description
1392 1395 doc = gettext(i[0].__doc__)
1393 1396 if not doc:
1394 1397 doc = _("(no help text available)")
1395 1398 if ui.quiet:
1396 1399 doc = doc.splitlines(0)[0]
1397 1400 ui.write("\n%s\n" % doc.rstrip())
1398 1401
1399 1402 if not ui.quiet:
1400 1403 # options
1401 1404 if i[1]:
1402 1405 option_lists.append((_("options:\n"), i[1]))
1403 1406
1404 1407 addglobalopts(False)
1405 1408
1406 1409 def helplist(header, select=None):
1407 1410 h = {}
1408 1411 cmds = {}
1409 1412 for c, e in table.iteritems():
1410 1413 f = c.split("|", 1)[0]
1411 1414 if select and not select(f):
1412 1415 continue
1413 1416 if (not select and name != 'shortlist' and
1414 1417 e[0].__module__ != __name__):
1415 1418 continue
1416 1419 if name == "shortlist" and not f.startswith("^"):
1417 1420 continue
1418 1421 f = f.lstrip("^")
1419 1422 if not ui.debugflag and f.startswith("debug"):
1420 1423 continue
1421 1424 doc = gettext(e[0].__doc__)
1422 1425 if not doc:
1423 1426 doc = _("(no help text available)")
1424 1427 h[f] = doc.splitlines(0)[0].rstrip()
1425 1428 cmds[f] = c.lstrip("^")
1426 1429
1427 1430 if not h:
1428 1431 ui.status(_('no commands defined\n'))
1429 1432 return
1430 1433
1431 1434 ui.status(header)
1432 1435 fns = sorted(h)
1433 1436 m = max(map(len, fns))
1434 1437 for f in fns:
1435 1438 if ui.verbose:
1436 1439 commands = cmds[f].replace("|",", ")
1437 1440 ui.write(" %s:\n %s\n"%(commands, h[f]))
1438 1441 else:
1439 1442 ui.write(' %-*s %s\n' % (m, f, h[f]))
1440 1443
1441 1444 exts = list(extensions.extensions())
1442 1445 if exts and name != 'shortlist':
1443 1446 ui.write(_('\nenabled extensions:\n\n'))
1444 1447 maxlength = 0
1445 1448 exthelps = []
1446 1449 for ename, ext in exts:
1447 1450 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1448 1451 ename = ename.split('.')[-1]
1449 1452 maxlength = max(len(ename), maxlength)
1450 1453 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1451 1454 for ename, text in exthelps:
1452 1455 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1453 1456
1454 1457 if not ui.quiet:
1455 1458 addglobalopts(True)
1456 1459
1457 1460 def helptopic(name):
1458 1461 for names, header, doc in help.helptable:
1459 1462 if name in names:
1460 1463 break
1461 1464 else:
1462 1465 raise error.UnknownCommand(name)
1463 1466
1464 1467 # description
1465 1468 if not doc:
1466 1469 doc = _("(no help text available)")
1467 1470 if hasattr(doc, '__call__'):
1468 1471 doc = doc()
1469 1472
1470 1473 ui.write("%s\n" % header)
1471 1474 ui.write("%s\n" % doc.rstrip())
1472 1475
1473 1476 def helpext(name):
1474 1477 try:
1475 1478 mod = extensions.find(name)
1476 1479 except KeyError:
1477 1480 raise error.UnknownCommand(name)
1478 1481
1479 1482 doc = gettext(mod.__doc__) or _('no help text available')
1480 1483 doc = doc.splitlines(0)
1481 1484 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1482 1485 for d in doc[1:]:
1483 1486 ui.write(d, '\n')
1484 1487
1485 1488 ui.status('\n')
1486 1489
1487 1490 try:
1488 1491 ct = mod.cmdtable
1489 1492 except AttributeError:
1490 1493 ct = {}
1491 1494
1492 1495 modcmds = set([c.split('|', 1)[0] for c in ct])
1493 1496 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1494 1497
1495 1498 if name and name != 'shortlist':
1496 1499 i = None
1497 1500 for f in (helptopic, helpcmd, helpext):
1498 1501 try:
1499 1502 f(name)
1500 1503 i = None
1501 1504 break
1502 1505 except error.UnknownCommand, inst:
1503 1506 i = inst
1504 1507 if i:
1505 1508 raise i
1506 1509
1507 1510 else:
1508 1511 # program name
1509 1512 if ui.verbose or with_version:
1510 1513 version_(ui)
1511 1514 else:
1512 1515 ui.status(_("Mercurial Distributed SCM\n"))
1513 1516 ui.status('\n')
1514 1517
1515 1518 # list of commands
1516 1519 if name == "shortlist":
1517 1520 header = _('basic commands:\n\n')
1518 1521 else:
1519 1522 header = _('list of commands:\n\n')
1520 1523
1521 1524 helplist(header)
1522 1525
1523 1526 # list all option lists
1524 1527 opt_output = []
1525 1528 for title, options in option_lists:
1526 1529 opt_output.append(("\n%s" % title, None))
1527 1530 for shortopt, longopt, default, desc in options:
1528 1531 if "DEPRECATED" in desc and not ui.verbose: continue
1529 1532 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1530 1533 longopt and " --%s" % longopt),
1531 1534 "%s%s" % (desc,
1532 1535 default
1533 1536 and _(" (default: %s)") % default
1534 1537 or "")))
1535 1538
1536 1539 if not name:
1537 1540 ui.write(_("\nadditional help topics:\n\n"))
1538 1541 topics = []
1539 1542 for names, header, doc in help.helptable:
1540 1543 names = [(-len(name), name) for name in names]
1541 1544 names.sort()
1542 1545 topics.append((names[0][1], header))
1543 1546 topics_len = max([len(s[0]) for s in topics])
1544 1547 for t, desc in topics:
1545 1548 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1546 1549
1547 1550 if opt_output:
1548 1551 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1549 1552 for first, second in opt_output:
1550 1553 if second:
1551 1554 # wrap descriptions at 70 characters, just like the
1552 1555 # main help texts
1553 1556 second = textwrap.wrap(second, width=70 - opts_len - 3)
1554 1557 pad = '\n' + ' ' * (opts_len + 3)
1555 1558 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1556 1559 else:
1557 1560 ui.write("%s\n" % first)
1558 1561
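# Illustrative sketch only: the option lists above are laid out by wrapping
# each description to the remaining width and indenting continuation lines
# under the description column. The option name, text and width below are
# hypothetical.
import textwrap

opts_len = 21
first = ' -U --noupdate'
second = textwrap.wrap('the clone will only include a repository, without a '
                       'working copy', width=70 - opts_len - 3)
pad = '\n' + ' ' * (opts_len + 3)
print(" %-*s %s" % (opts_len, first, pad.join(second)))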
1559 1562 def identify(ui, repo, source=None,
1560 1563 rev=None, num=None, id=None, branch=None, tags=None):
1561 1564 """identify the working copy or specified revision
1562 1565
1563 1566 With no revision, print a summary of the current state of the
1564 1567 repository.
1565 1568
1566 1569 With a path, do a lookup in another repository.
1567 1570
1568 1571 This summary identifies the repository state using one or two
1569 1572 parent hash identifiers, followed by a "+" if there are
1570 1573 uncommitted changes in the working directory, a list of tags for
1571 1574 this revision and a branch name for non-default branches.
1572 1575 """
1573 1576
1574 1577 if not repo and not source:
1575 1578 raise util.Abort(_("There is no Mercurial repository here "
1576 1579 "(.hg not found)"))
1577 1580
1578 1581 hexfunc = ui.debugflag and hex or short
1579 1582 default = not (num or id or branch or tags)
1580 1583 output = []
1581 1584
1582 1585 revs = []
1583 1586 if source:
1584 1587 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1585 1588 repo = hg.repository(ui, source)
1586 1589
1587 1590 if not repo.local():
1588 1591 if not rev and revs:
1589 1592 rev = revs[0]
1590 1593 if not rev:
1591 1594 rev = "tip"
1592 1595 if num or branch or tags:
1593 1596 raise util.Abort(
1594 1597 "can't query remote revision number, branch, or tags")
1595 1598 output = [hexfunc(repo.lookup(rev))]
1596 1599 elif not rev:
1597 1600 ctx = repo[None]
1598 1601 parents = ctx.parents()
1599 1602 changed = False
1600 1603 if default or id or num:
1601 1604 changed = ctx.files() + ctx.deleted()
1602 1605 if default or id:
1603 1606 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1604 1607 (changed) and "+" or "")]
1605 1608 if num:
1606 1609 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1607 1610 (changed) and "+" or ""))
1608 1611 else:
1609 1612 ctx = repo[rev]
1610 1613 if default or id:
1611 1614 output = [hexfunc(ctx.node())]
1612 1615 if num:
1613 1616 output.append(str(ctx.rev()))
1614 1617
1615 1618 if repo.local() and default and not ui.quiet:
1616 1619 b = encoding.tolocal(ctx.branch())
1617 1620 if b != 'default':
1618 1621 output.append("(%s)" % b)
1619 1622
1620 1623 # multiple tags for a single parent separated by '/'
1621 1624 t = "/".join(ctx.tags())
1622 1625 if t:
1623 1626 output.append(t)
1624 1627
1625 1628 if branch:
1626 1629 output.append(encoding.tolocal(ctx.branch()))
1627 1630
1628 1631 if tags:
1629 1632 output.extend(ctx.tags())
1630 1633
1631 1634 ui.write("%s\n" % ' '.join(output))
1632 1635
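# Illustrative sketch only: how the default identify output above is assembled
# for a working directory with two parents and uncommitted changes. The short
# hashes and the file name are hypothetical.
parents = ['8580ff50825a', '4a00ca7e0bdf']
changed = ['somefile.txt']
print('%s%s' % ('+'.join(parents), changed and '+' or ''))
# -> 8580ff50825a+4a00ca7e0bdf+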
1633 1636 def import_(ui, repo, patch1, *patches, **opts):
1634 1637 """import an ordered set of patches
1635 1638
1636 1639 Import a list of patches and commit them individually.
1637 1640
1638 1641 If there are outstanding changes in the working directory, import
1639 1642 will abort unless given the -f/--force flag.
1640 1643
1641 1644 You can import a patch straight from a mail message. Even patches
1642 1645 as attachments work (the body part must be of type text/plain or
1643 1646 text/x-patch to be used). The From and Subject headers of the email
1644 1647 message are used as the default committer and commit message. All
1645 1648 text/plain body parts before the first diff are added to the commit
1646 1649 message.
1647 1650
1648 1651 If the imported patch was generated by hg export, user and
1649 1652 description from patch override values from message headers and
1650 1653 body. Values given on command line with -m/--message and -u/--user
1651 1654 override these.
1652 1655
1653 1656 If --exact is specified, import will set the working directory to
1654 1657 the parent of each patch before applying it, and will abort if the
1655 1658 resulting changeset has a different ID than the one recorded in
1656 1659 the patch. This may happen due to character set problems or other
1657 1660 deficiencies in the text patch format.
1658 1661
1659 1662 With -s/--similarity, hg will attempt to discover renames and
1660 1663 copies in the patch in the same way as 'addremove'.
1661 1664
1662 1665 To read a patch from standard input, use patch name "-". See 'hg
1663 1666 help dates' for a list of formats valid for -d/--date.
1664 1667 """
1665 1668 patches = (patch1,) + patches
1666 1669
1667 1670 date = opts.get('date')
1668 1671 if date:
1669 1672 opts['date'] = util.parsedate(date)
1670 1673
1671 1674 try:
1672 1675 sim = float(opts.get('similarity') or 0)
1673 1676 except ValueError:
1674 1677 raise util.Abort(_('similarity must be a number'))
1675 1678 if sim < 0 or sim > 100:
1676 1679 raise util.Abort(_('similarity must be between 0 and 100'))
1677 1680
1678 1681 if opts.get('exact') or not opts.get('force'):
1679 1682 cmdutil.bail_if_changed(repo)
1680 1683
1681 1684 d = opts["base"]
1682 1685 strip = opts["strip"]
1683 1686 wlock = lock = None
1684 1687 try:
1685 1688 wlock = repo.wlock()
1686 1689 lock = repo.lock()
1687 1690 for p in patches:
1688 1691 pf = os.path.join(d, p)
1689 1692
1690 1693 if pf == '-':
1691 1694 ui.status(_("applying patch from stdin\n"))
1692 1695 pf = sys.stdin
1693 1696 else:
1694 1697 ui.status(_("applying %s\n") % p)
1695 1698 pf = url.open(ui, pf)
1696 1699 data = patch.extract(ui, pf)
1697 1700 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1698 1701
1699 1702 if tmpname is None:
1700 1703 raise util.Abort(_('no diffs found'))
1701 1704
1702 1705 try:
1703 1706 cmdline_message = cmdutil.logmessage(opts)
1704 1707 if cmdline_message:
1705 1708 # pick up the cmdline msg
1706 1709 message = cmdline_message
1707 1710 elif message:
1708 1711 # pick up the patch msg
1709 1712 message = message.strip()
1710 1713 else:
1711 1714 # launch the editor
1712 1715 message = None
1713 1716 ui.debug(_('message:\n%s\n') % message)
1714 1717
1715 1718 wp = repo.parents()
1716 1719 if opts.get('exact'):
1717 1720 if not nodeid or not p1:
1718 1721 raise util.Abort(_('not a mercurial patch'))
1719 1722 p1 = repo.lookup(p1)
1720 1723 p2 = repo.lookup(p2 or hex(nullid))
1721 1724
1722 1725 if p1 != wp[0].node():
1723 1726 hg.clean(repo, p1)
1724 1727 repo.dirstate.setparents(p1, p2)
1725 1728 elif p2:
1726 1729 try:
1727 1730 p1 = repo.lookup(p1)
1728 1731 p2 = repo.lookup(p2)
1729 1732 if p1 == wp[0].node():
1730 1733 repo.dirstate.setparents(p1, p2)
1731 1734 except error.RepoError:
1732 1735 pass
1733 1736 if opts.get('exact') or opts.get('import_branch'):
1734 1737 repo.dirstate.setbranch(branch or 'default')
1735 1738
1736 1739 files = {}
1737 1740 try:
1738 1741 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1739 1742 files=files)
1740 1743 finally:
1741 1744 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1742 1745 if not opts.get('no_commit'):
1743 1746 n = repo.commit(files, message, opts.get('user') or user,
1744 opts.get('date') or date)
1747 opts.get('date') or date,
1748 editor=cmdutil.commiteditor)
1745 1749 if opts.get('exact'):
1746 1750 if hex(n) != nodeid:
1747 1751 repo.rollback()
1748 1752 raise util.Abort(_('patch is damaged'
1749 1753 ' or loses information'))
1750 1754 # Force a dirstate write so that the next transaction
1751 1755 # backs up an up-to-date file.
1752 1756 repo.dirstate.write()
1753 1757 finally:
1754 1758 os.unlink(tmpname)
1755 1759 finally:
1756 1760 release(lock, wlock)
1757 1761
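# Illustrative usage of the command above (file names are hypothetical):
#   hg import fix.patch               # apply the patch and commit it
#   hg import --no-commit fix.patch   # apply only, commit separately later
#   cat fix.patch | hg import -       # read the patch from standard input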
1758 1762 def incoming(ui, repo, source="default", **opts):
1759 1763 """show new changesets found in source
1760 1764
1761 1765 Show new changesets found in the specified path/URL or the default
1762 1766 pull location. These are the changesets that would be pulled if a
1763 1767 pull was requested.
1764 1768
1765 1769 For remote repositories, using --bundle avoids downloading the
1766 1770 changesets twice if the incoming is followed by a pull.
1767 1771
1768 1772 See pull for valid source format details.
1769 1773 """
1770 1774 limit = cmdutil.loglimit(opts)
1771 1775 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1772 1776 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1773 1777 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1774 1778 if revs:
1775 1779 revs = [other.lookup(rev) for rev in revs]
1776 1780 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1777 1781 force=opts["force"])
1778 1782 if not incoming:
1779 1783 try:
1780 1784 os.unlink(opts["bundle"])
1781 1785 except:
1782 1786 pass
1783 1787 ui.status(_("no changes found\n"))
1784 1788 return 1
1785 1789
1786 1790 cleanup = None
1787 1791 try:
1788 1792 fname = opts["bundle"]
1789 1793 if fname or not other.local():
1790 1794 # create a bundle (uncompressed if other repo is not local)
1791 1795
1792 1796 if revs is None and other.capable('changegroupsubset'):
1793 1797 revs = rheads
1794 1798
1795 1799 if revs is None:
1796 1800 cg = other.changegroup(incoming, "incoming")
1797 1801 else:
1798 1802 cg = other.changegroupsubset(incoming, revs, 'incoming')
1799 1803 bundletype = other.local() and "HG10BZ" or "HG10UN"
1800 1804 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1801 1805 # keep written bundle?
1802 1806 if opts["bundle"]:
1803 1807 cleanup = None
1804 1808 if not other.local():
1805 1809 # use the created uncompressed bundlerepo
1806 1810 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1807 1811
1808 1812 o = other.changelog.nodesbetween(incoming, revs)[0]
1809 1813 if opts.get('newest_first'):
1810 1814 o.reverse()
1811 1815 displayer = cmdutil.show_changeset(ui, other, opts)
1812 1816 count = 0
1813 1817 for n in o:
1814 1818 if count >= limit:
1815 1819 break
1816 1820 parents = [p for p in other.changelog.parents(n) if p != nullid]
1817 1821 if opts.get('no_merges') and len(parents) == 2:
1818 1822 continue
1819 1823 count += 1
1820 1824 displayer.show(other[n])
1821 1825 finally:
1822 1826 if hasattr(other, 'close'):
1823 1827 other.close()
1824 1828 if cleanup:
1825 1829 os.unlink(cleanup)
1826 1830
1827 1831 def init(ui, dest=".", **opts):
1828 1832 """create a new repository in the given directory
1829 1833
1830 1834 Initialize a new repository in the given directory. If the given
1831 1835 directory does not exist, it is created.
1832 1836
1833 1837 If no directory is given, the current directory is used.
1834 1838
1835 1839 It is possible to specify an ssh:// URL as the destination.
1836 1840 See 'hg help urls' for more information.
1837 1841 """
1838 1842 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1839 1843
1840 1844 def locate(ui, repo, *pats, **opts):
1841 1845 """locate files matching specific patterns
1842 1846
1843 1847 Print all files under Mercurial control whose names match the
1844 1848 given patterns.
1845 1849
1846 1850 This command searches the entire repository by default. To search
1847 1851 just the current directory and its subdirectories, use
1848 1852 "--include .".
1849 1853
1850 1854 If no patterns are given to match, this command prints all file
1851 1855 names.
1852 1856
1853 1857 If you want to feed the output of this command into the "xargs"
1854 1858 command, use the -0 option to both this command and "xargs". This
1855 1859 will avoid the problem of "xargs" treating single filenames that
1856 1860 contain white space as multiple filenames.
1857 1861 """
1858 1862 end = opts.get('print0') and '\0' or '\n'
1859 1863 rev = opts.get('rev') or None
1860 1864
1861 1865 ret = 1
1862 1866 m = cmdutil.match(repo, pats, opts, default='relglob')
1863 1867 m.bad = lambda x,y: False
1864 1868 for abs in repo[rev].walk(m):
1865 1869 if not rev and abs not in repo.dirstate:
1866 1870 continue
1867 1871 if opts.get('fullpath'):
1868 1872 ui.write(repo.wjoin(abs), end)
1869 1873 else:
1870 1874 ui.write(((pats and m.rel(abs)) or abs), end)
1871 1875 ret = 0
1872 1876
1873 1877 return ret
1874 1878
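# Illustrative pairing with xargs as described in the docstring above (the
# exact command is only an example):
#   hg locate -0 | xargs -0 wc -l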
1875 1879 def log(ui, repo, *pats, **opts):
1876 1880 """show revision history of entire repository or files
1877 1881
1878 1882 Print the revision history of the specified files or the entire
1879 1883 project.
1880 1884
1881 1885 File history is shown without following rename or copy history of
1882 1886 files. Use -f/--follow with a file name to follow history across
1883 1887 renames and copies. --follow without a file name will only show
1884 1888 ancestors or descendants of the starting revision. --follow-first
1885 1889 only follows the first parent of merge revisions.
1886 1890
1887 1891 If no revision range is specified, the default is tip:0 unless
1888 1892 --follow is set, in which case the working directory parent is
1889 1893 used as the starting revision.
1890 1894
1891 1895 See 'hg help dates' for a list of formats valid for -d/--date.
1892 1896
1893 1897 By default this command outputs: changeset id and hash, tags,
1894 1898 non-trivial parents, user, date and time, and a summary for each
1895 1899 commit. When the -v/--verbose switch is used, the list of changed
1896 1900 files and full commit message is shown.
1897 1901
1898 1902 NOTE: log -p/--patch may generate unexpected diff output for merge
1899 1903 changesets, as it will only compare the merge changeset against
1900 1904 its first parent. Also, the files: list will only reflect files
1901 1905 that are different from BOTH parents.
1902 1906
1903 1907 """
1904 1908
1905 1909 get = util.cachefunc(lambda r: repo[r].changeset())
1906 1910 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1907 1911
1908 1912 limit = cmdutil.loglimit(opts)
1909 1913 count = 0
1910 1914
1911 1915 if opts.get('copies') and opts.get('rev'):
1912 1916 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1913 1917 else:
1914 1918 endrev = len(repo)
1915 1919 rcache = {}
1916 1920 ncache = {}
1917 1921 def getrenamed(fn, rev):
1918 1922 '''looks up all renames for a file (up to endrev) the first
1919 1923 time the file is given. It indexes on the changerev and only
1920 1924 parses the manifest if linkrev != changerev.
1921 1925 Returns rename info for fn at changerev rev.'''
1922 1926 if fn not in rcache:
1923 1927 rcache[fn] = {}
1924 1928 ncache[fn] = {}
1925 1929 fl = repo.file(fn)
1926 1930 for i in fl:
1927 1931 node = fl.node(i)
1928 1932 lr = fl.linkrev(i)
1929 1933 renamed = fl.renamed(node)
1930 1934 rcache[fn][lr] = renamed
1931 1935 if renamed:
1932 1936 ncache[fn][node] = renamed
1933 1937 if lr >= endrev:
1934 1938 break
1935 1939 if rev in rcache[fn]:
1936 1940 return rcache[fn][rev]
1937 1941
1938 1942 # If linkrev != rev (i.e. rev not found in rcache) fall back to
1939 1943 # filectx logic.
1940 1944
1941 1945 try:
1942 1946 return repo[rev][fn].renamed()
1943 1947 except error.LookupError:
1944 1948 pass
1945 1949 return None
1946 1950
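# Illustrative summary of the caching in getrenamed above: the first lookup
# for a file scans its filelog once and fills rcache[fn], keyed by changelog
# revision; later lookups are plain dict hits, and revisions not reached by
# that scan fall back to repo[rev][fn].renamed().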
1947 1951 df = False
1948 1952 if opts["date"]:
1949 1953 df = util.matchdate(opts["date"])
1950 1954
1951 1955 only_branches = opts.get('only_branch')
1952 1956
1953 1957 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1954 1958 for st, rev, fns in changeiter:
1955 1959 if st == 'add':
1956 1960 parents = [p for p in repo.changelog.parentrevs(rev)
1957 1961 if p != nullrev]
1958 1962 if opts.get('no_merges') and len(parents) == 2:
1959 1963 continue
1960 1964 if opts.get('only_merges') and len(parents) != 2:
1961 1965 continue
1962 1966
1963 1967 if only_branches:
1964 1968 revbranch = get(rev)[5]['branch']
1965 1969 if revbranch not in only_branches:
1966 1970 continue
1967 1971
1968 1972 if df:
1969 1973 changes = get(rev)
1970 1974 if not df(changes[2][0]):
1971 1975 continue
1972 1976
1973 1977 if opts.get('keyword'):
1974 1978 changes = get(rev)
1975 1979 miss = 0
1976 1980 for k in [kw.lower() for kw in opts['keyword']]:
1977 1981 if not (k in changes[1].lower() or
1978 1982 k in changes[4].lower() or
1979 1983 k in " ".join(changes[3]).lower()):
1980 1984 miss = 1
1981 1985 break
1982 1986 if miss:
1983 1987 continue
1984 1988
1985 1989 if opts['user']:
1986 1990 changes = get(rev)
1987 1991 if not [k for k in opts['user'] if k in changes[1]]:
1988 1992 continue
1989 1993
1990 1994 copies = []
1991 1995 if opts.get('copies') and rev:
1992 1996 for fn in get(rev)[3]:
1993 1997 rename = getrenamed(fn, rev)
1994 1998 if rename:
1995 1999 copies.append((fn, rename[0]))
1996 2000 displayer.show(context.changectx(repo, rev), copies=copies)
1997 2001 elif st == 'iter':
1998 2002 if count == limit: break
1999 2003 if displayer.flush(rev):
2000 2004 count += 1
2001 2005
2002 2006 def manifest(ui, repo, node=None, rev=None):
2003 2007 """output the current or given revision of the project manifest
2004 2008
2005 2009 Print a list of version controlled files for the given revision.
2006 2010 If no revision is given, the first parent of the working directory
2007 2011 is used, or the null revision if none is checked out.
2008 2012
2009 2013 With the -v flag, print file permissions, symlink and executable bits.
2010 2014 With the --debug flag, print file revision hashes.
2011 2015 """
2012 2016
2013 2017 if rev and node:
2014 2018 raise util.Abort(_("please specify just one revision"))
2015 2019
2016 2020 if not node:
2017 2021 node = rev
2018 2022
2019 2023 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2020 2024 ctx = repo[node]
2021 2025 for f in ctx:
2022 2026 if ui.debugflag:
2023 2027 ui.write("%40s " % hex(ctx.manifest()[f]))
2024 2028 if ui.verbose:
2025 2029 ui.write(decor[ctx.flags(f)])
2026 2030 ui.write("%s\n" % f)
2027 2031
2028 2032 def merge(ui, repo, node=None, **opts):
2029 2033 """merge working directory with another revision
2030 2034
2031 2035 The contents of the current working directory are updated with all
2032 2036 changes made in the requested revision since the last common
2033 2037 predecessor revision.
2034 2038
2035 2039 Files that changed between either parent are marked as changed for
2036 2040 the next commit and a commit must be performed before any further
2037 2041 updates are allowed. The next commit has two parents.
2038 2042
2039 2043 If no revision is specified, the working directory's parent is a
2040 2044 head revision, and the current branch contains exactly one other
2041 2045 head, then the other head is merged by default. Otherwise, an
2042 2046 explicit revision to merge with must be provided.
2043 2047 """
2044 2048
2045 2049 if opts.get('rev') and node:
2046 2050 raise util.Abort(_("please specify just one revision"))
2047 2051 if not node:
2048 2052 node = opts.get('rev')
2049 2053
2050 2054 if not node:
2051 2055 branch = repo.changectx(None).branch()
2052 2056 bheads = repo.branchheads(branch)
2053 2057 if len(bheads) > 2:
2054 2058 raise util.Abort(_("branch '%s' has %d heads - "
2055 2059 "please merge with an explicit rev") %
2056 2060 (branch, len(bheads)))
2057 2061
2058 2062 parent = repo.dirstate.parents()[0]
2059 2063 if len(bheads) == 1:
2060 2064 if len(repo.heads()) > 1:
2061 2065 raise util.Abort(_("branch '%s' has one head - "
2062 2066 "please merge with an explicit rev") %
2063 2067 branch)
2064 2068 msg = _('there is nothing to merge')
2065 2069 if parent != repo.lookup(repo[None].branch()):
2066 2070 msg = _('%s - use "hg update" instead') % msg
2067 2071 raise util.Abort(msg)
2068 2072
2069 2073 if parent not in bheads:
2070 2074 raise util.Abort(_('working dir not at a head rev - '
2071 2075 'use "hg update" or merge with an explicit rev'))
2072 2076 node = parent == bheads[0] and bheads[-1] or bheads[0]
2073 2077
2074 2078 if opts.get('show'):
2075 2079 p1 = repo['.']
2076 2080 p2 = repo[node]
2077 2081 common = p1.ancestor(p2)
2078 2082 roots, heads = [common.node()], [p2.node()]
2079 2083 displayer = cmdutil.show_changeset(ui, repo, opts)
2080 2084 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2081 2085 displayer.show(repo[node])
2082 2086 return 0
2083 2087
2084 2088 return hg.merge(repo, node, force=opts.get('force'))
2085 2089
2086 2090 def outgoing(ui, repo, dest=None, **opts):
2087 2091 """show changesets not found in destination
2088 2092
2089 2093 Show changesets not found in the specified destination repository
2090 2094 or the default push location. These are the changesets that would
2091 2095 be pushed if a push was requested.
2092 2096
2093 2097 See pull for valid destination format details.
2094 2098 """
2095 2099 limit = cmdutil.loglimit(opts)
2096 2100 dest, revs, checkout = hg.parseurl(
2097 2101 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2098 2102 if revs:
2099 2103 revs = [repo.lookup(rev) for rev in revs]
2100 2104
2101 2105 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2102 2106 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2103 2107 o = repo.findoutgoing(other, force=opts.get('force'))
2104 2108 if not o:
2105 2109 ui.status(_("no changes found\n"))
2106 2110 return 1
2107 2111 o = repo.changelog.nodesbetween(o, revs)[0]
2108 2112 if opts.get('newest_first'):
2109 2113 o.reverse()
2110 2114 displayer = cmdutil.show_changeset(ui, repo, opts)
2111 2115 count = 0
2112 2116 for n in o:
2113 2117 if count >= limit:
2114 2118 break
2115 2119 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2116 2120 if opts.get('no_merges') and len(parents) == 2:
2117 2121 continue
2118 2122 count += 1
2119 2123 displayer.show(repo[n])
2120 2124
2121 2125 def parents(ui, repo, file_=None, **opts):
2122 2126 """show the parents of the working directory or revision
2123 2127
2124 2128 Print the working directory's parent revisions. If a revision is
2125 2129 given via -r/--rev, the parent of that revision will be printed.
2126 2130 If a file argument is given, the revision in which the file was last
2127 2131 changed (before the working directory revision or the argument to
2128 2132 --rev if given) is printed.
2129 2133 """
2130 2134 rev = opts.get('rev')
2131 2135 if rev:
2132 2136 ctx = repo[rev]
2133 2137 else:
2134 2138 ctx = repo[None]
2135 2139
2136 2140 if file_:
2137 2141 m = cmdutil.match(repo, (file_,), opts)
2138 2142 if m.anypats() or len(m.files()) != 1:
2139 2143 raise util.Abort(_('can only specify an explicit file name'))
2140 2144 file_ = m.files()[0]
2141 2145 filenodes = []
2142 2146 for cp in ctx.parents():
2143 2147 if not cp:
2144 2148 continue
2145 2149 try:
2146 2150 filenodes.append(cp.filenode(file_))
2147 2151 except error.LookupError:
2148 2152 pass
2149 2153 if not filenodes:
2150 2154 raise util.Abort(_("'%s' not found in manifest!") % file_)
2151 2155 fl = repo.file(file_)
2152 2156 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2153 2157 else:
2154 2158 p = [cp.node() for cp in ctx.parents()]
2155 2159
2156 2160 displayer = cmdutil.show_changeset(ui, repo, opts)
2157 2161 for n in p:
2158 2162 if n != nullid:
2159 2163 displayer.show(repo[n])
2160 2164
2161 2165 def paths(ui, repo, search=None):
2162 2166 """show aliases for remote repositories
2163 2167
2164 2168 Show definition of symbolic path name NAME. If no name is given,
2165 2169 show definitions of all available names.
2166 2170
2167 2171 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2168 2172 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2169 2173
2170 2174 See 'hg help urls' for more information.
2171 2175 """
2172 2176 if search:
2173 2177 for name, path in ui.configitems("paths"):
2174 2178 if name == search:
2175 2179 ui.write("%s\n" % url.hidepassword(path))
2176 2180 return
2177 2181 ui.warn(_("not found!\n"))
2178 2182 return 1
2179 2183 else:
2180 2184 for name, path in ui.configitems("paths"):
2181 2185 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2182 2186
2183 2187 def postincoming(ui, repo, modheads, optupdate, checkout):
2184 2188 if modheads == 0:
2185 2189 return
2186 2190 if optupdate:
2187 2191 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2188 2192 return hg.update(repo, checkout)
2189 2193 else:
2190 2194 ui.status(_("not updating, since new heads added\n"))
2191 2195 if modheads > 1:
2192 2196 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2193 2197 else:
2194 2198 ui.status(_("(run 'hg update' to get a working copy)\n"))
2195 2199
2196 2200 def pull(ui, repo, source="default", **opts):
2197 2201 """pull changes from the specified source
2198 2202
2199 2203 Pull changes from a remote repository to the local one.
2200 2204
2201 2205 This finds all changes from the repository at the specified path
2202 2206 or URL and adds them to the local repository. By default, this
2203 2207 does not update the copy of the project in the working directory.
2204 2208
2205 2209 Use hg incoming if you want to see what will be added by the next
2206 2210 pull without actually adding the changes to the repository.
2207 2211
2208 2212 If SOURCE is omitted, the 'default' path will be used.
2209 2213 See 'hg help urls' for more information.
2210 2214 """
2211 2215 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2212 2216 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2213 2217 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2214 2218 if revs:
2215 2219 try:
2216 2220 revs = [other.lookup(rev) for rev in revs]
2217 2221 except error.CapabilityError:
2218 2222 err = _("Other repository doesn't support revision lookup, "
2219 2223 "so a rev cannot be specified.")
2220 2224 raise util.Abort(err)
2221 2225
2222 2226 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2223 2227 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2224 2228
2225 2229 def push(ui, repo, dest=None, **opts):
2226 2230 """push changes to the specified destination
2227 2231
2228 2232 Push changes from the local repository to the given destination.
2229 2233
2230 2234 This is the symmetrical operation for pull. It moves changes from
2231 2235 the current repository to a different one. If the destination is
2232 2236 local, this is identical to a pull in that directory from the
2233 2237 current one.
2234 2238
2235 2239 By default, push will refuse to run if it detects the result would
2236 2240 increase the number of remote heads. This generally indicates that
2237 2241 the client has forgotten to pull and merge before pushing.
2238 2242
2239 2243 If -r/--rev is used, the named revision and all its ancestors will
2240 2244 be pushed to the remote repository.
2241 2245
2242 2246 Look at the help text for URLs for important details about ssh://
2243 2247 URLs. If DESTINATION is omitted, a default path will be used.
2244 2248 See 'hg help urls' for more information.
2245 2249 """
2246 2250 dest, revs, checkout = hg.parseurl(
2247 2251 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2248 2252 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2249 2253 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2250 2254 if revs:
2251 2255 revs = [repo.lookup(rev) for rev in revs]
2252 2256 r = repo.push(other, opts.get('force'), revs=revs)
2253 2257 return r == 0
2254 2258
2255 2259 def recover(ui, repo):
2256 2260 """roll back an interrupted transaction
2257 2261
2258 2262 Recover from an interrupted commit or pull.
2259 2263
2260 2264 This command tries to fix the repository status after an
2261 2265 interrupted operation. It should only be necessary when Mercurial
2262 2266 suggests it.
2263 2267 """
2264 2268 if repo.recover():
2265 2269 return hg.verify(repo)
2266 2270 return 1
2267 2271
2268 2272 def remove(ui, repo, *pats, **opts):
2269 2273 """remove the specified files on the next commit
2270 2274
2271 2275 Schedule the indicated files for removal from the repository.
2272 2276
2273 2277 This only removes files from the current branch, not from the
2274 2278 entire project history. -A/--after can be used to remove only
2275 2279 files that have already been deleted, -f/--force can be used to
2276 2280 force deletion, and -Af can be used to remove files from the next
2277 2281 revision without deleting them.
2278 2282
2279 2283 The following table details the behavior of remove for different
2280 2284 file states (columns) and option combinations (rows). The file
2281 2285 states are Added, Clean, Modified and Missing (as reported by hg
2282 2286 status). The actions are Warn, Remove (from branch) and Delete
2283 2287 (from disk).
2284 2288
2285 2289 A C M !
2286 2290 none W RD W R
2287 2291 -f R RD RD R
2288 2292 -A W W W R
2289 2293 -Af R R R R
2290 2294
2291 2295 This command schedules the files to be removed at the next commit.
2292 2296 To undo a remove before that, see hg revert.
2293 2297 """
2294 2298
2295 2299 after, force = opts.get('after'), opts.get('force')
2296 2300 if not pats and not after:
2297 2301 raise util.Abort(_('no files specified'))
2298 2302
2299 2303 m = cmdutil.match(repo, pats, opts)
2300 2304 s = repo.status(match=m, clean=True)
2301 2305 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2302 2306
2303 2307 def warn(files, reason):
2304 2308 for f in files:
2305 2309 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2306 2310 % (m.rel(f), reason))
2307 2311
2308 2312 if force:
2309 2313 remove, forget = modified + deleted + clean, added
2310 2314 elif after:
2311 2315 remove, forget = deleted, []
2312 2316 warn(modified + added + clean, _('still exists'))
2313 2317 else:
2314 2318 remove, forget = deleted + clean, []
2315 2319 warn(modified, _('is modified'))
2316 2320 warn(added, _('has been marked for add'))
2317 2321
2318 2322 for f in sorted(remove + forget):
2319 2323 if ui.verbose or not m.exact(f):
2320 2324 ui.status(_('removing %s\n') % m.rel(f))
2321 2325
2322 2326 repo.forget(forget)
2323 2327 repo.remove(remove, unlink=not after)
2324 2328
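# Illustrative sketch only: the option/state table from the remove docstring
# restated as a lookup dict, so the documented behaviour can be queried
# programmatically. 'W' = warn, 'R' = remove from branch, 'D' = delete from
# disk; the constant name is hypothetical.
REMOVE_BEHAVIOUR = {
    #        Added       Clean       Modified    Missing
    'none': {'A': 'W',  'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R',  'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W',  'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R',  'C': 'R',  'M': 'R',  '!': 'R'},
}

# REMOVE_BEHAVIOUR['-f']['M'] -> 'RD': forcing removal of a modified file
# removes it from the branch and deletes it from disk.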
2325 2329 def rename(ui, repo, *pats, **opts):
2326 2330 """rename files; equivalent of copy + remove
2327 2331
2328 2332 Mark dest as copies of sources; mark sources for deletion. If dest
2329 2333 is a directory, copies are put in that directory. If dest is a
2330 2334 file, there can only be one source.
2331 2335
2332 2336 By default, this command copies the contents of files as they
2333 2337 exist in the working directory. If invoked with -A/--after, the
2334 2338 operation is recorded, but no copying is performed.
2335 2339
2336 2340 This command takes effect at the next commit. To undo a rename
2337 2341 before that, see hg revert.
2338 2342 """
2339 2343 wlock = repo.wlock(False)
2340 2344 try:
2341 2345 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2342 2346 finally:
2343 2347 wlock.release()
2344 2348
2345 2349 def resolve(ui, repo, *pats, **opts):
2346 2350 """retry file merges from a merge or update
2347 2351
2348 2352 This command will cleanly retry unresolved file merges using file
2349 2353 revisions preserved from the last update or merge. To attempt to
2350 2354 resolve all unresolved files, use the -a/--all switch.
2351 2355
2352 2356 If a conflict is resolved manually, please note that the changes
2353 2357 will be overwritten if the merge is retried with resolve. The
2354 2358 -m/--mark switch should be used to mark the file as resolved.
2355 2359
2356 2360 This command will also allow listing resolved files and manually
2357 2361 marking and unmarking files as resolved. All files must be marked
2358 2362 as resolved before the new commits are permitted.
2359 2363
2360 2364 The codes used to show the status of files are:
2361 2365 U = unresolved
2362 2366 R = resolved
2363 2367 """
2364 2368
2365 2369 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2366 2370
2367 2371 if (show and (mark or unmark)) or (mark and unmark):
2368 2372 raise util.Abort(_("too many options specified"))
2369 2373 if pats and all:
2370 2374 raise util.Abort(_("can't specify --all and patterns"))
2371 2375 if not (all or pats or show or mark or unmark):
2372 2376 raise util.Abort(_('no files or directories specified; '
2373 2377 'use --all to remerge all files'))
2374 2378
2375 2379 ms = merge_.mergestate(repo)
2376 2380 m = cmdutil.match(repo, pats, opts)
2377 2381
2378 2382 for f in ms:
2379 2383 if m(f):
2380 2384 if show:
2381 2385 ui.write("%s %s\n" % (ms[f].upper(), f))
2382 2386 elif mark:
2383 2387 ms.mark(f, "r")
2384 2388 elif unmark:
2385 2389 ms.mark(f, "u")
2386 2390 else:
2387 2391 wctx = repo[None]
2388 2392 mctx = wctx.parents()[-1]
2389 2393
2390 2394 # backup pre-resolve (merge uses .orig for its own purposes)
2391 2395 a = repo.wjoin(f)
2392 2396 util.copyfile(a, a + ".resolve")
2393 2397
2394 2398 # resolve file
2395 2399 ms.resolve(f, wctx, mctx)
2396 2400
2397 2401 # replace filemerge's .orig file with our resolve file
2398 2402 util.rename(a + ".resolve", a + ".orig")
2399 2403
2400 2404 def revert(ui, repo, *pats, **opts):
2401 2405 """restore individual files or directories to an earlier state
2402 2406
2403 2407 (Use update -r to check out earlier revisions; revert does not
2404 2408 change the working directory parents.)
2405 2409
2406 2410 With no revision specified, revert the named files or directories
2407 2411 to the contents they had in the parent of the working directory.
2408 2412 This restores the contents of the affected files to an unmodified
2409 2413 state and unschedules adds, removes, copies, and renames. If the
2410 2414 working directory has two parents, you must explicitly specify the
2411 2415 revision to revert to.
2412 2416
2413 2417 Using the -r/--rev option, revert the given files or directories
2414 2418 to their contents as of a specific revision. This can be helpful
2415 2419 to "roll back" some or all of an earlier change. See 'hg help
2416 2420 dates' for a list of formats valid for -d/--date.
2417 2421
2418 2422 Revert modifies the working directory. It does not commit any
2419 2423 changes, or change the parent of the working directory. If you
2420 2424 revert to a revision other than the parent of the working
2421 2425 directory, the reverted files will thus appear modified
2422 2426 afterwards.
2423 2427
2424 2428 If a file has been deleted, it is restored. If the executable mode
2425 2429 of a file was changed, it is reset.
2426 2430
2427 2431 If names are given, all files matching the names are reverted.
2428 2432 If no arguments are given, no files are reverted.
2429 2433
2430 2434 Modified files are saved with a .orig suffix before reverting.
2431 2435 To disable these backups, use --no-backup.
2432 2436 """
2433 2437
2434 2438 if opts["date"]:
2435 2439 if opts["rev"]:
2436 2440 raise util.Abort(_("you can't specify a revision and a date"))
2437 2441 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2438 2442
2439 2443 if not pats and not opts.get('all'):
2440 2444 raise util.Abort(_('no files or directories specified; '
2441 2445 'use --all to revert the whole repo'))
2442 2446
2443 2447 parent, p2 = repo.dirstate.parents()
2444 2448 if not opts.get('rev') and p2 != nullid:
2445 2449 raise util.Abort(_('uncommitted merge - please provide a '
2446 2450 'specific revision'))
2447 2451 ctx = repo[opts.get('rev')]
2448 2452 node = ctx.node()
2449 2453 mf = ctx.manifest()
2450 2454 if node == parent:
2451 2455 pmf = mf
2452 2456 else:
2453 2457 pmf = None
2454 2458
2455 2459 # need all matching names in dirstate and manifest of target rev,
2456 2460 # so have to walk both. do not print errors if files exist in one
2457 2461 # but not the other.
2458 2462
2459 2463 names = {}
2460 2464
2461 2465 wlock = repo.wlock()
2462 2466 try:
2463 2467 # walk dirstate.
2464 2468
2465 2469 m = cmdutil.match(repo, pats, opts)
2466 2470 m.bad = lambda x,y: False
2467 2471 for abs in repo.walk(m):
2468 2472 names[abs] = m.rel(abs), m.exact(abs)
2469 2473
2470 2474 # walk target manifest.
2471 2475
2472 2476 def badfn(path, msg):
2473 2477 if path in names:
2474 2478 return False
2475 2479 path_ = path + '/'
2476 2480 for f in names:
2477 2481 if f.startswith(path_):
2478 2482 return False
2479 2483 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2480 2484 return False
2481 2485
2482 2486 m = cmdutil.match(repo, pats, opts)
2483 2487 m.bad = badfn
2484 2488 for abs in repo[node].walk(m):
2485 2489 if abs not in names:
2486 2490 names[abs] = m.rel(abs), m.exact(abs)
2487 2491
2488 2492 m = cmdutil.matchfiles(repo, names)
2489 2493 changes = repo.status(match=m)[:4]
2490 2494 modified, added, removed, deleted = map(set, changes)
2491 2495
2492 2496 # if f is a rename, also revert the source
2493 2497 cwd = repo.getcwd()
2494 2498 for f in added:
2495 2499 src = repo.dirstate.copied(f)
2496 2500 if src and src not in names and repo.dirstate[src] == 'r':
2497 2501 removed.add(src)
2498 2502 names[src] = (repo.pathto(src, cwd), True)
2499 2503
2500 2504 def removeforget(abs):
2501 2505 if repo.dirstate[abs] == 'a':
2502 2506 return _('forgetting %s\n')
2503 2507 return _('removing %s\n')
2504 2508
2505 2509 revert = ([], _('reverting %s\n'))
2506 2510 add = ([], _('adding %s\n'))
2507 2511 remove = ([], removeforget)
2508 2512 undelete = ([], _('undeleting %s\n'))
2509 2513
2510 2514 disptable = (
2511 2515 # dispatch table:
2512 2516 # file state
2513 2517 # action if in target manifest
2514 2518 # action if not in target manifest
2515 2519 # make backup if in target manifest
2516 2520 # make backup if not in target manifest
2517 2521 (modified, revert, remove, True, True),
2518 2522 (added, revert, remove, True, False),
2519 2523 (removed, undelete, None, False, False),
2520 2524 (deleted, revert, remove, False, False),
2521 2525 )
2522 2526
2523 2527 for abs, (rel, exact) in sorted(names.items()):
2524 2528 mfentry = mf.get(abs)
2525 2529 target = repo.wjoin(abs)
2526 2530 def handle(xlist, dobackup):
2527 2531 xlist[0].append(abs)
2528 2532 if dobackup and not opts.get('no_backup') and util.lexists(target):
2529 2533 bakname = "%s.orig" % rel
2530 2534 ui.note(_('saving current version of %s as %s\n') %
2531 2535 (rel, bakname))
2532 2536 if not opts.get('dry_run'):
2533 2537 util.copyfile(target, bakname)
2534 2538 if ui.verbose or not exact:
2535 2539 msg = xlist[1]
2536 2540 if not isinstance(msg, basestring):
2537 2541 msg = msg(abs)
2538 2542 ui.status(msg % rel)
2539 2543 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2540 2544 if abs not in table: continue
2541 2545 # file has changed in dirstate
2542 2546 if mfentry:
2543 2547 handle(hitlist, backuphit)
2544 2548 elif misslist is not None:
2545 2549 handle(misslist, backupmiss)
2546 2550 break
2547 2551 else:
2548 2552 if abs not in repo.dirstate:
2549 2553 if mfentry:
2550 2554 handle(add, True)
2551 2555 elif exact:
2552 2556 ui.warn(_('file not managed: %s\n') % rel)
2553 2557 continue
2554 2558 # file has not changed in dirstate
2555 2559 if node == parent:
2556 2560 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2557 2561 continue
2558 2562 if pmf is None:
2559 2563 # only need parent manifest in this unlikely case,
2560 2564 # so do not read by default
2561 2565 pmf = repo[parent].manifest()
2562 2566 if abs in pmf:
2563 2567 if mfentry:
2564 2568 # if version of file is same in parent and target
2565 2569 # manifests, do nothing
2566 2570 if (pmf[abs] != mfentry or
2567 2571 pmf.flags(abs) != mf.flags(abs)):
2568 2572 handle(revert, False)
2569 2573 else:
2570 2574 handle(remove, False)
2571 2575
2572 2576 if not opts.get('dry_run'):
2573 2577 def checkout(f):
2574 2578 fc = ctx[f]
2575 2579 repo.wwrite(f, fc.data(), fc.flags())
2576 2580
2577 2581 audit_path = util.path_auditor(repo.root)
2578 2582 for f in remove[0]:
2579 2583 if repo.dirstate[f] == 'a':
2580 2584 repo.dirstate.forget(f)
2581 2585 continue
2582 2586 audit_path(f)
2583 2587 try:
2584 2588 util.unlink(repo.wjoin(f))
2585 2589 except OSError:
2586 2590 pass
2587 2591 repo.dirstate.remove(f)
2588 2592
2589 2593 normal = None
2590 2594 if node == parent:
2591 2595 # We're reverting to our parent. If possible, we'd like status
2592 2596 # to report the file as clean. We have to use normallookup for
2593 2597 # merges to avoid losing information about merged/dirty files.
2594 2598 if p2 != nullid:
2595 2599 normal = repo.dirstate.normallookup
2596 2600 else:
2597 2601 normal = repo.dirstate.normal
2598 2602 for f in revert[0]:
2599 2603 checkout(f)
2600 2604 if normal:
2601 2605 normal(f)
2602 2606
2603 2607 for f in add[0]:
2604 2608 checkout(f)
2605 2609 repo.dirstate.add(f)
2606 2610
2607 2611 normal = repo.dirstate.normallookup
2608 2612 if node == parent and p2 == nullid:
2609 2613 normal = repo.dirstate.normal
2610 2614 for f in undelete[0]:
2611 2615 checkout(f)
2612 2616 normal(f)
2613 2617
2614 2618 finally:
2615 2619 wlock.release()
2616 2620
2617 2621 def rollback(ui, repo):
2618 2622 """roll back the last transaction
2619 2623
2620 2624 This command should be used with care. There is only one level of
2621 2625 rollback, and there is no way to undo a rollback. It will also
2622 2626 restore the dirstate at the time of the last transaction, losing
2623 2627 any dirstate changes since that time.
2624 2628
2625 2629 Transactions are used to encapsulate the effects of all commands
2626 2630 that create new changesets or propagate existing changesets into a
2627 2631 repository. For example, the following commands are transactional,
2628 2632 and their effects can be rolled back:
2629 2633
2630 2634 commit
2631 2635 import
2632 2636 pull
2633 2637 push (with this repository as destination)
2634 2638 unbundle
2635 2639
2636 2640 This command is not intended for use on public repositories. Once
2637 2641 changes are visible for pull by other users, rolling a transaction
2638 2642 back locally is ineffective (someone else may already have pulled
2639 2643 the changes). Furthermore, a race is possible with readers of the
2640 2644 repository; for example an in-progress pull from the repository
2641 2645 may fail if a rollback is performed.
2642 2646 """
2643 2647 repo.rollback()
2644 2648
2645 2649 def root(ui, repo):
2646 2650 """print the root (top) of the current working directory
2647 2651
2648 2652 Print the root directory of the current repository.
2649 2653 """
2650 2654 ui.write(repo.root + "\n")
2651 2655
2652 2656 def serve(ui, repo, **opts):
2653 2657 """export the repository via HTTP
2654 2658
2655 2659 Start a local HTTP repository browser and pull server.
2656 2660
2657 2661 By default, the server logs accesses to stdout and errors to
2658 2662 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2659 2663 files.
2660 2664 """
2661 2665
2662 2666 if opts["stdio"]:
2663 2667 if repo is None:
2664 2668 raise error.RepoError(_("There is no Mercurial repository here"
2665 2669 " (.hg not found)"))
2666 2670 s = sshserver.sshserver(ui, repo)
2667 2671 s.serve_forever()
2668 2672
2669 2673 baseui = repo and repo.baseui or ui
2670 2674 optlist = ("name templates style address port prefix ipv6"
2671 2675 " accesslog errorlog webdir_conf certificate")
2672 2676 for o in optlist.split():
2673 2677 if opts[o]:
2674 2678 baseui.setconfig("web", o, str(opts[o]))
2675 2679 if (repo is not None) and (repo.ui != baseui):
2676 2680 repo.ui.setconfig("web", o, str(opts[o]))
2677 2681
2678 2682 if repo is None and not ui.config("web", "webdir_conf"):
2679 2683 raise error.RepoError(_("There is no Mercurial repository here"
2680 2684 " (.hg not found)"))
2681 2685
2682 2686 class service:
2683 2687 def init(self):
2684 2688 util.set_signal_handler()
2685 2689 self.httpd = server.create_server(baseui, repo)
2686 2690
2687 2691 if not ui.verbose: return
2688 2692
2689 2693 if self.httpd.prefix:
2690 2694 prefix = self.httpd.prefix.strip('/') + '/'
2691 2695 else:
2692 2696 prefix = ''
2693 2697
2694 2698 port = ':%d' % self.httpd.port
2695 2699 if port == ':80':
2696 2700 port = ''
2697 2701
2698 2702 bindaddr = self.httpd.addr
2699 2703 if bindaddr == '0.0.0.0':
2700 2704 bindaddr = '*'
2701 2705 elif ':' in bindaddr: # IPv6
2702 2706 bindaddr = '[%s]' % bindaddr
2703 2707
2704 2708 fqaddr = self.httpd.fqaddr
2705 2709 if ':' in fqaddr:
2706 2710 fqaddr = '[%s]' % fqaddr
2707 2711 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2708 2712 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2709 2713
2710 2714 def run(self):
2711 2715 self.httpd.serve_forever()
2712 2716
2713 2717 service = service()
2714 2718
2715 2719 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2716 2720
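The body of serve leans on the init/run split that cmdutil.service expects: everything that must happen before detaching (creating and binding the server) goes in init, and the blocking loop goes in run. A rough, self-contained sketch of that shape, with no real server behind it; the daemon handling is left to cmdutil.service, which roughly performs the -d/--daemon detach around those two calls.

    class dummyservice(object):
        def init(self):
            # in serve: self.httpd = server.create_server(baseui, repo)
            self.ready = True
        def run(self):
            # in serve: self.httpd.serve_forever()
            pass

    svc = dummyservice()
    # serve hands these two callables to cmdutil.service(opts, initfn=..., runfn=...)
    svc.init()
    svc.run()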
2717 2721 def status(ui, repo, *pats, **opts):
2718 2722 """show changed files in the working directory
2719 2723
2720 2724 Show status of files in the repository. If names are given, only
2721 2725 files that match are shown. Files that are clean or ignored, or the
2722 2726 source of a copy/move operation, are not listed unless -c/--clean,
2723 2727 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2724 2728 described with "show only ..." are given, the options -mardu are
2725 2729 used.
2726 2730
2727 2731 Option -q/--quiet hides untracked (unknown and ignored) files
2728 2732 unless explicitly requested with -u/--unknown or -i/--ignored.
2729 2733
2730 2734 NOTE: status may appear to disagree with diff if permissions have
2731 2735 changed or a merge has occurred. The standard diff format does not
2732 2736 report permission changes and diff only reports changes relative
2733 2737 to one merge parent.
2734 2738
2735 2739 If one revision is given, it is used as the base revision.
2736 2740 If two revisions are given, the difference between them is shown.
2737 2741
2738 2742 The codes used to show the status of files are:
2739 2743 M = modified
2740 2744 A = added
2741 2745 R = removed
2742 2746 C = clean
2743 2747 ! = missing (deleted by non-hg command, but still tracked)
2744 2748 ? = not tracked
2745 2749 I = ignored
2746 2750 = the previously added file was copied from here
2747 2751 """
2748 2752
2749 2753 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2750 2754 cwd = (pats and repo.getcwd()) or ''
2751 2755 end = opts.get('print0') and '\0' or '\n'
2752 2756 copy = {}
2753 2757 states = 'modified added removed deleted unknown ignored clean'.split()
2754 2758 show = [k for k in states if opts.get(k)]
2755 2759 if opts.get('all'):
2756 2760 show += ui.quiet and (states[:4] + ['clean']) or states
2757 2761 if not show:
2758 2762 show = ui.quiet and states[:4] or states[:5]
2759 2763
2760 2764 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2761 2765 'ignored' in show, 'clean' in show, 'unknown' in show)
2762 2766 changestates = zip(states, 'MAR!?IC', stat)
2763 2767
2764 2768 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2765 2769 ctxn = repo[nullid]
2766 2770 ctx1 = repo[node1]
2767 2771 ctx2 = repo[node2]
2768 2772 added = stat[1]
2769 2773 if node2 is None:
2770 2774 added = stat[0] + stat[1] # merged?
2771 2775
2772 2776 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2773 2777 if k in added:
2774 2778 copy[k] = v
2775 2779 elif v in added:
2776 2780 copy[v] = k
2777 2781
2778 2782 for state, char, files in changestates:
2779 2783 if state in show:
2780 2784 format = "%s %%s%s" % (char, end)
2781 2785 if opts.get('no_status'):
2782 2786 format = "%%s%s" % end
2783 2787
2784 2788 for f in files:
2785 2789 ui.write(format % repo.pathto(f, cwd))
2786 2790 if f in copy:
2787 2791 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2788 2792
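The zip near the end is easy to miss: repo.status() returns seven lists in exactly the order spelled out in 'states', and pairing them with the string 'MAR!?IC' is what turns each list into its one-letter prefix. A small, self-contained sketch of that pairing with a made-up status result:

    # minimal sketch; 'stat' stands in for a repo.status() return value
    states = 'modified added removed deleted unknown ignored clean'.split()
    stat = ([], ['new.txt'], [], ['gone.txt'], [], [], [])   # fake result
    for state, char, files in zip(states, 'MAR!?IC', stat):
        for f in files:
            print '%s %s' % (char, f)    # prints "A new.txt" and "! gone.txt"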
2789 2793 def tag(ui, repo, name1, *names, **opts):
2790 2794 """add one or more tags for the current or given revision
2791 2795
2792 2796 Name a particular revision using <name>.
2793 2797
2794 2798 Tags are used to name particular revisions of the repository and are
2795 2799 very useful for comparing different revisions, for going back to
2796 2800 significant earlier versions, or for marking branch points as releases.
2797 2801
2798 2802 If no revision is given, the parent of the working directory is
2799 2803 used, or tip if no revision is checked out.
2800 2804
2801 2805 To facilitate version control, distribution, and merging of tags,
2802 2806 they are stored as a file named ".hgtags" which is managed
2803 2807 similarly to other project files and can be hand-edited if
2804 2808 necessary. The file '.hg/localtags' is used for local tags (not
2805 2809 shared among repositories).
2806 2810
2807 2811 See 'hg help dates' for a list of formats valid for -d/--date.
2808 2812 """
2809 2813
2810 2814 rev_ = "."
2811 2815 names = (name1,) + names
2812 2816 if len(names) != len(set(names)):
2813 2817 raise util.Abort(_('tag names must be unique'))
2814 2818 for n in names:
2815 2819 if n in ['tip', '.', 'null']:
2816 2820 raise util.Abort(_('the name \'%s\' is reserved') % n)
2817 2821 if opts.get('rev') and opts.get('remove'):
2818 2822 raise util.Abort(_("--rev and --remove are incompatible"))
2819 2823 if opts.get('rev'):
2820 2824 rev_ = opts['rev']
2821 2825 message = opts.get('message')
2822 2826 if opts.get('remove'):
2823 2827 expectedtype = opts.get('local') and 'local' or 'global'
2824 2828 for n in names:
2825 2829 if not repo.tagtype(n):
2826 2830 raise util.Abort(_('tag \'%s\' does not exist') % n)
2827 2831 if repo.tagtype(n) != expectedtype:
2828 2832 if expectedtype == 'global':
2829 2833 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2830 2834 else:
2831 2835 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2832 2836 rev_ = nullid
2833 2837 if not message:
2834 2838 message = _('Removed tag %s') % ', '.join(names)
2835 2839 elif not opts.get('force'):
2836 2840 for n in names:
2837 2841 if n in repo.tags():
2838 2842 raise util.Abort(_('tag \'%s\' already exists '
2839 2843 '(use -f to force)') % n)
2840 2844 if not rev_ and repo.dirstate.parents()[1] != nullid:
2841 2845 raise util.Abort(_('uncommitted merge - please provide a '
2842 2846 'specific revision'))
2843 2847 r = repo[rev_].node()
2844 2848
2845 2849 if not message:
2846 2850 message = (_('Added tag %s for changeset %s') %
2847 2851 (', '.join(names), short(r)))
2848 2852
2849 2853 date = opts.get('date')
2850 2854 if date:
2851 2855 date = util.parsedate(date)
2852 2856
2853 2857 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2854 2858
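After the name, revision and message checks, the command reduces to the single repo.tag() call above; the method's signature appears in the localrepo.py half of this diff. A rough sketch of the equivalent call, with the path and tag name made up; passing None for user and date falls back to the usual defaults at commit time.

    # minimal sketch, assuming a local repository at a hypothetical path
    from mercurial import ui as uimod, hg

    u = uimod.ui()
    repo = hg.repository(u, '/path/to/repo')   # hypothetical path
    node = repo['.'].node()                    # parent of the working directory
    # names, node, message, local, user, date
    repo.tag(['v0.1'], node, 'Added tag v0.1', False, None, None)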
2855 2859 def tags(ui, repo):
2856 2860 """list repository tags
2857 2861
2858 2862 This lists both regular and local tags. When the -v/--verbose
2859 2863 switch is used, a third column "local" is printed for local tags.
2860 2864 """
2861 2865
2862 2866 hexfunc = ui.debugflag and hex or short
2863 2867 tagtype = ""
2864 2868
2865 2869 for t, n in reversed(repo.tagslist()):
2866 2870 if ui.quiet:
2867 2871 ui.write("%s\n" % t)
2868 2872 continue
2869 2873
2870 2874 try:
2871 2875 hn = hexfunc(n)
2872 2876 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2873 2877 except error.LookupError:
2874 2878 r = " ?:%s" % hn
2875 2879 else:
2876 2880 spaces = " " * (30 - encoding.colwidth(t))
2877 2881 if ui.verbose:
2878 2882 if repo.tagtype(t) == 'local':
2879 2883 tagtype = " local"
2880 2884 else:
2881 2885 tagtype = ""
2882 2886 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2883 2887
2884 2888 def tip(ui, repo, **opts):
2885 2889 """show the tip revision
2886 2890
2887 2891 The tip revision (usually just called the tip) is the most
2888 2892 recently added changeset in the repository, the most recently
2889 2893 changed head.
2890 2894
2891 2895 If you have just made a commit, that commit will be the tip. If
2892 2896 you have just pulled changes from another repository, the tip of
2893 2897 that repository becomes the current tip. The "tip" tag is special
2894 2898 and cannot be renamed or assigned to a different changeset.
2895 2899 """
2896 2900 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2897 2901
2898 2902 def unbundle(ui, repo, fname1, *fnames, **opts):
2899 2903 """apply one or more changegroup files
2900 2904
2901 2905 Apply one or more compressed changegroup files generated by the
2902 2906 bundle command.
2903 2907 """
2904 2908 fnames = (fname1,) + fnames
2905 2909
2906 2910 lock = repo.lock()
2907 2911 try:
2908 2912 for fname in fnames:
2909 2913 f = url.open(ui, fname)
2910 2914 gen = changegroup.readbundle(f, fname)
2911 2915 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2912 2916 finally:
2913 2917 lock.release()
2914 2918
2915 2919 return postincoming(ui, repo, modheads, opts.get('update'), None)
2916 2920
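The loop above is the entire unbundle mechanism: open each file, wrap it in a changegroup reader, and feed it to the repository under the store lock. A compressed sketch of one iteration, with the repository path and bundle filename made up:

    # minimal sketch using only the calls shown above
    from mercurial import ui as uimod, hg, url, changegroup

    u = uimod.ui()
    repo = hg.repository(u, '/path/to/repo')            # hypothetical path
    lock = repo.lock()
    try:
        f = url.open(u, 'changes.hg')                    # a file written by 'hg bundle'
        gen = changegroup.readbundle(f, 'changes.hg')
        repo.addchangegroup(gen, 'unbundle', 'bundle:changes.hg')
    finally:
        lock.release()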
2917 2921 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2918 2922 """update working directory
2919 2923
2920 2924 Update the repository's working directory to the specified
2921 2925 revision, or the tip of the current branch if none is specified.
2922 2926 Use null as the revision to remove the working copy (like 'hg
2923 2927 clone -U').
2924 2928
2925 2929 When the working directory contains no uncommitted changes, it
2926 2930 will be replaced by the state of the requested revision from the
2927 2931 repository. When the requested revision is on a different branch,
2928 2932 the working directory will additionally be switched to that
2929 2933 branch.
2930 2934
2931 2935 When there are uncommitted changes, use option -C/--clean to
2932 2936 discard them, forcibly replacing the state of the working
2933 2937 directory with the requested revision.
2934 2938
2935 2939 When there are uncommitted changes and option -C/--clean is not
2936 2940 used, and the parent revision and requested revision are on the
2937 2941 same branch, and one of them is an ancestor of the other, then the
2938 2942 new working directory will contain the requested revision merged
2939 2943 with the uncommitted changes. Otherwise, the update will fail with
2940 2944 a suggestion to use 'merge' or 'update -C' instead.
2941 2945
2942 2946 If you want to update just one file to an older revision, use
2943 2947 revert.
2944 2948
2945 2949 See 'hg help dates' for a list of formats valid for -d/--date.
2946 2950 """
2947 2951 if rev and node:
2948 2952 raise util.Abort(_("please specify just one revision"))
2949 2953
2950 2954 if not rev:
2951 2955 rev = node
2952 2956
2953 2957 if date:
2954 2958 if rev:
2955 2959 raise util.Abort(_("you can't specify a revision and a date"))
2956 2960 rev = cmdutil.finddate(ui, repo, date)
2957 2961
2958 2962 if clean:
2959 2963 return hg.clean(repo, rev)
2960 2964 else:
2961 2965 return hg.update(repo, rev)
2962 2966
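The two return paths above are the whole decision: -C/--clean maps to hg.clean, everything else to hg.update, and the merge-versus-abort behaviour described in the docstring lives inside hg.update. A rough sketch of both calls, with the path and revisions made up:

    # minimal sketch, assuming a local repository at a hypothetical path
    from mercurial import ui as uimod, hg

    u = uimod.ui()
    repo = hg.repository(u, '/path/to/repo')   # hypothetical path
    hg.update(repo, 'default')                 # like 'hg update default'
    hg.clean(repo, 'null')                     # like 'hg update -C null': empty working copy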
2963 2967 def verify(ui, repo):
2964 2968 """verify the integrity of the repository
2965 2969
2966 2970 Verify the integrity of the current repository.
2967 2971
2968 2972 This will perform an extensive check of the repository's
2969 2973 integrity, validating the hashes and checksums of each entry in
2970 2974 the changelog, manifest, and tracked files, as well as the
2971 2975 integrity of their crosslinks and indices.
2972 2976 """
2973 2977 return hg.verify(repo)
2974 2978
2975 2979 def version_(ui):
2976 2980 """output version and copyright information"""
2977 2981 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2978 2982 % util.version())
2979 2983 ui.status(_(
2980 2984 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2981 2985 "This is free software; see the source for copying conditions. "
2982 2986 "There is NO\nwarranty; "
2983 2987 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2984 2988 ))
2985 2989
2986 2990 # Command options and aliases are listed here, alphabetically
2987 2991
2988 2992 globalopts = [
2989 2993 ('R', 'repository', '',
2990 2994 _('repository root directory or symbolic path name')),
2991 2995 ('', 'cwd', '', _('change working directory')),
2992 2996 ('y', 'noninteractive', None,
2993 2997 _('do not prompt, assume \'yes\' for any required answers')),
2994 2998 ('q', 'quiet', None, _('suppress output')),
2995 2999 ('v', 'verbose', None, _('enable additional output')),
2996 3000 ('', 'config', [], _('set/override config option')),
2997 3001 ('', 'debug', None, _('enable debugging output')),
2998 3002 ('', 'debugger', None, _('start debugger')),
2999 3003 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3000 3004 ('', 'encodingmode', encoding.encodingmode,
3001 3005 _('set the charset encoding mode')),
3002 3006 ('', 'traceback', None, _('print traceback on exception')),
3003 3007 ('', 'time', None, _('time how long the command takes')),
3004 3008 ('', 'profile', None, _('print command execution profile')),
3005 3009 ('', 'version', None, _('output version information and exit')),
3006 3010 ('h', 'help', None, _('display help and exit')),
3007 3011 ]
3008 3012
3009 3013 dryrunopts = [('n', 'dry-run', None,
3010 3014 _('do not perform actions, just print output'))]
3011 3015
3012 3016 remoteopts = [
3013 3017 ('e', 'ssh', '', _('specify ssh command to use')),
3014 3018 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3015 3019 ]
3016 3020
3017 3021 walkopts = [
3018 3022 ('I', 'include', [], _('include names matching the given patterns')),
3019 3023 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3020 3024 ]
3021 3025
3022 3026 commitopts = [
3023 3027 ('m', 'message', '', _('use <text> as commit message')),
3024 3028 ('l', 'logfile', '', _('read commit message from <file>')),
3025 3029 ]
3026 3030
3027 3031 commitopts2 = [
3028 3032 ('d', 'date', '', _('record datecode as commit date')),
3029 3033 ('u', 'user', '', _('record the specified user as committer')),
3030 3034 ]
3031 3035
3032 3036 templateopts = [
3033 3037 ('', 'style', '', _('display using template map file')),
3034 3038 ('', 'template', '', _('display with template')),
3035 3039 ]
3036 3040
3037 3041 logopts = [
3038 3042 ('p', 'patch', None, _('show patch')),
3039 3043 ('g', 'git', None, _('use git extended diff format')),
3040 3044 ('l', 'limit', '', _('limit number of changes displayed')),
3041 3045 ('M', 'no-merges', None, _('do not show merges')),
3042 3046 ] + templateopts
3043 3047
3044 3048 diffopts = [
3045 3049 ('a', 'text', None, _('treat all files as text')),
3046 3050 ('g', 'git', None, _('use git extended diff format')),
3047 3051 ('', 'nodates', None, _("don't include dates in diff headers"))
3048 3052 ]
3049 3053
3050 3054 diffopts2 = [
3051 3055 ('p', 'show-function', None, _('show which function each change is in')),
3052 3056 ('w', 'ignore-all-space', None,
3053 3057 _('ignore white space when comparing lines')),
3054 3058 ('b', 'ignore-space-change', None,
3055 3059 _('ignore changes in the amount of white space')),
3056 3060 ('B', 'ignore-blank-lines', None,
3057 3061 _('ignore changes whose lines are all blank')),
3058 3062 ('U', 'unified', '', _('number of lines of context to show'))
3059 3063 ]
3060 3064
3061 3065 similarityopts = [
3062 3066 ('s', 'similarity', '',
3063 3067 _('guess renamed files by similarity (0<=s<=100)'))
3064 3068 ]
3065 3069
3066 3070 table = {
3067 3071 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3068 3072 "addremove":
3069 3073 (addremove, similarityopts + walkopts + dryrunopts,
3070 3074 _('[OPTION]... [FILE]...')),
3071 3075 "^annotate|blame":
3072 3076 (annotate,
3073 3077 [('r', 'rev', '', _('annotate the specified revision')),
3074 3078 ('f', 'follow', None, _('follow file copies and renames')),
3075 3079 ('a', 'text', None, _('treat all files as text')),
3076 3080 ('u', 'user', None, _('list the author (long with -v)')),
3077 3081 ('d', 'date', None, _('list the date (short with -q)')),
3078 3082 ('n', 'number', None, _('list the revision number (default)')),
3079 3083 ('c', 'changeset', None, _('list the changeset')),
3080 3084 ('l', 'line-number', None,
3081 3085 _('show line number at the first appearance'))
3082 3086 ] + walkopts,
3083 3087 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3084 3088 "archive":
3085 3089 (archive,
3086 3090 [('', 'no-decode', None, _('do not pass files through decoders')),
3087 3091 ('p', 'prefix', '', _('directory prefix for files in archive')),
3088 3092 ('r', 'rev', '', _('revision to distribute')),
3089 3093 ('t', 'type', '', _('type of distribution to create')),
3090 3094 ] + walkopts,
3091 3095 _('[OPTION]... DEST')),
3092 3096 "backout":
3093 3097 (backout,
3094 3098 [('', 'merge', None,
3095 3099 _('merge with old dirstate parent after backout')),
3096 3100 ('', 'parent', '', _('parent to choose when backing out merge')),
3097 3101 ('r', 'rev', '', _('revision to backout')),
3098 3102 ] + walkopts + commitopts + commitopts2,
3099 3103 _('[OPTION]... [-r] REV')),
3100 3104 "bisect":
3101 3105 (bisect,
3102 3106 [('r', 'reset', False, _('reset bisect state')),
3103 3107 ('g', 'good', False, _('mark changeset good')),
3104 3108 ('b', 'bad', False, _('mark changeset bad')),
3105 3109 ('s', 'skip', False, _('skip testing changeset')),
3106 3110 ('c', 'command', '', _('use command to check changeset state')),
3107 3111 ('U', 'noupdate', False, _('do not update to target'))],
3108 3112 _("[-gbsr] [-c CMD] [REV]")),
3109 3113 "branch":
3110 3114 (branch,
3111 3115 [('f', 'force', None,
3112 3116 _('set branch name even if it shadows an existing branch')),
3113 3117 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3114 3118 _('[-fC] [NAME]')),
3115 3119 "branches":
3116 3120 (branches,
3117 3121 [('a', 'active', False,
3118 3122 _('show only branches that have unmerged heads'))],
3119 3123 _('[-a]')),
3120 3124 "bundle":
3121 3125 (bundle,
3122 3126 [('f', 'force', None,
3123 3127 _('run even when remote repository is unrelated')),
3124 3128 ('r', 'rev', [],
3125 3129 _('a changeset up to which you would like to bundle')),
3126 3130 ('', 'base', [],
3127 3131 _('a base changeset to specify instead of a destination')),
3128 3132 ('a', 'all', None, _('bundle all changesets in the repository')),
3129 3133 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3130 3134 ] + remoteopts,
3131 3135 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3132 3136 "cat":
3133 3137 (cat,
3134 3138 [('o', 'output', '', _('print output to file with formatted name')),
3135 3139 ('r', 'rev', '', _('print the given revision')),
3136 3140 ('', 'decode', None, _('apply any matching decode filter')),
3137 3141 ] + walkopts,
3138 3142 _('[OPTION]... FILE...')),
3139 3143 "^clone":
3140 3144 (clone,
3141 3145 [('U', 'noupdate', None,
3142 3146 _('the clone will only contain a repository (no working copy)')),
3143 3147 ('r', 'rev', [],
3144 3148 _('a changeset you would like to have after cloning')),
3145 3149 ('', 'pull', None, _('use pull protocol to copy metadata')),
3146 3150 ('', 'uncompressed', None,
3147 3151 _('use uncompressed transfer (fast over LAN)')),
3148 3152 ] + remoteopts,
3149 3153 _('[OPTION]... SOURCE [DEST]')),
3150 3154 "^commit|ci":
3151 3155 (commit,
3152 3156 [('A', 'addremove', None,
3153 3157 _('mark new/missing files as added/removed before committing')),
3154 3158 ('', 'close-branch', None,
3155 3159 _('mark a branch as closed, hiding it from the branch list')),
3156 3160 ] + walkopts + commitopts + commitopts2,
3157 3161 _('[OPTION]... [FILE]...')),
3158 3162 "copy|cp":
3159 3163 (copy,
3160 3164 [('A', 'after', None, _('record a copy that has already occurred')),
3161 3165 ('f', 'force', None,
3162 3166 _('forcibly copy over an existing managed file')),
3163 3167 ] + walkopts + dryrunopts,
3164 3168 _('[OPTION]... [SOURCE]... DEST')),
3165 3169 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3166 3170 "debugcheckstate": (debugcheckstate, []),
3167 3171 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3168 3172 "debugcomplete":
3169 3173 (debugcomplete,
3170 3174 [('o', 'options', None, _('show the command options'))],
3171 3175 _('[-o] CMD')),
3172 3176 "debugdate":
3173 3177 (debugdate,
3174 3178 [('e', 'extended', None, _('try extended date formats'))],
3175 3179 _('[-e] DATE [RANGE]')),
3176 3180 "debugdata": (debugdata, [], _('FILE REV')),
3177 3181 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3178 3182 "debugindex": (debugindex, [], _('FILE')),
3179 3183 "debugindexdot": (debugindexdot, [], _('FILE')),
3180 3184 "debuginstall": (debuginstall, []),
3181 3185 "debugrebuildstate":
3182 3186 (debugrebuildstate,
3183 3187 [('r', 'rev', '', _('revision to rebuild to'))],
3184 3188 _('[-r REV] [REV]')),
3185 3189 "debugrename":
3186 3190 (debugrename,
3187 3191 [('r', 'rev', '', _('revision to debug'))],
3188 3192 _('[-r REV] FILE')),
3189 3193 "debugsetparents":
3190 3194 (debugsetparents, [], _('REV1 [REV2]')),
3191 3195 "debugstate":
3192 3196 (debugstate,
3193 3197 [('', 'nodates', None, _('do not display the saved mtime'))],
3194 3198 _('[OPTION]...')),
3195 3199 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3196 3200 "^diff":
3197 3201 (diff,
3198 3202 [('r', 'rev', [], _('revision')),
3199 3203 ('c', 'change', '', _('change made by revision'))
3200 3204 ] + diffopts + diffopts2 + walkopts,
3201 3205 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3202 3206 "^export":
3203 3207 (export,
3204 3208 [('o', 'output', '', _('print output to file with formatted name')),
3205 3209 ('', 'switch-parent', None, _('diff against the second parent'))
3206 3210 ] + diffopts,
3207 3211 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3208 3212 "grep":
3209 3213 (grep,
3210 3214 [('0', 'print0', None, _('end fields with NUL')),
3211 3215 ('', 'all', None, _('print all revisions that match')),
3212 3216 ('f', 'follow', None,
3213 3217 _('follow changeset history, or file history across copies and renames')),
3214 3218 ('i', 'ignore-case', None, _('ignore case when matching')),
3215 3219 ('l', 'files-with-matches', None,
3216 3220 _('print only filenames and revisions that match')),
3217 3221 ('n', 'line-number', None, _('print matching line numbers')),
3218 3222 ('r', 'rev', [], _('search in given revision range')),
3219 3223 ('u', 'user', None, _('list the author (long with -v)')),
3220 3224 ('d', 'date', None, _('list the date (short with -q)')),
3221 3225 ] + walkopts,
3222 3226 _('[OPTION]... PATTERN [FILE]...')),
3223 3227 "heads":
3224 3228 (heads,
3225 3229 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3226 3230 ('a', 'active', False,
3227 3231 _('show only the active heads from open branches')),
3228 3232 ] + templateopts,
3229 3233 _('[-r REV] [REV]...')),
3230 3234 "help": (help_, [], _('[TOPIC]')),
3231 3235 "identify|id":
3232 3236 (identify,
3233 3237 [('r', 'rev', '', _('identify the specified revision')),
3234 3238 ('n', 'num', None, _('show local revision number')),
3235 3239 ('i', 'id', None, _('show global revision id')),
3236 3240 ('b', 'branch', None, _('show branch')),
3237 3241 ('t', 'tags', None, _('show tags'))],
3238 3242 _('[-nibt] [-r REV] [SOURCE]')),
3239 3243 "import|patch":
3240 3244 (import_,
3241 3245 [('p', 'strip', 1,
3242 3246 _('directory strip option for patch. This has the same '
3243 3247 'meaning as the corresponding patch option')),
3244 3248 ('b', 'base', '', _('base path')),
3245 3249 ('f', 'force', None,
3246 3250 _('skip check for outstanding uncommitted changes')),
3247 3251 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3248 3252 ('', 'exact', None,
3249 3253 _('apply patch to the nodes from which it was generated')),
3250 3254 ('', 'import-branch', None,
3251 3255 _('use any branch information in patch (implied by --exact)'))] +
3252 3256 commitopts + commitopts2 + similarityopts,
3253 3257 _('[OPTION]... PATCH...')),
3254 3258 "incoming|in":
3255 3259 (incoming,
3256 3260 [('f', 'force', None,
3257 3261 _('run even when remote repository is unrelated')),
3258 3262 ('n', 'newest-first', None, _('show newest record first')),
3259 3263 ('', 'bundle', '', _('file to store the bundles into')),
3260 3264 ('r', 'rev', [],
3261 3265 _('a specific revision up to which you would like to pull')),
3262 3266 ] + logopts + remoteopts,
3263 3267 _('[-p] [-n] [-M] [-f] [-r REV]...'
3264 3268 ' [--bundle FILENAME] [SOURCE]')),
3265 3269 "^init":
3266 3270 (init,
3267 3271 remoteopts,
3268 3272 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3269 3273 "locate":
3270 3274 (locate,
3271 3275 [('r', 'rev', '', _('search the repository as it stood at REV')),
3272 3276 ('0', 'print0', None,
3273 3277 _('end filenames with NUL, for use with xargs')),
3274 3278 ('f', 'fullpath', None,
3275 3279 _('print complete paths from the filesystem root')),
3276 3280 ] + walkopts,
3277 3281 _('[OPTION]... [PATTERN]...')),
3278 3282 "^log|history":
3279 3283 (log,
3280 3284 [('f', 'follow', None,
3281 3285 _('follow changeset history, or file history across copies and renames')),
3282 3286 ('', 'follow-first', None,
3283 3287 _('only follow the first parent of merge changesets')),
3284 3288 ('d', 'date', '', _('show revisions matching date spec')),
3285 3289 ('C', 'copies', None, _('show copied files')),
3286 3290 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3287 3291 ('r', 'rev', [], _('show the specified revision or range')),
3288 3292 ('', 'removed', None, _('include revisions where files were removed')),
3289 3293 ('m', 'only-merges', None, _('show only merges')),
3290 3294 ('u', 'user', [], _('revisions committed by user')),
3291 3295 ('b', 'only-branch', [],
3292 3296 _('show only changesets within the given named branch')),
3293 3297 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3294 3298 ] + logopts + walkopts,
3295 3299 _('[OPTION]... [FILE]')),
3296 3300 "manifest":
3297 3301 (manifest,
3298 3302 [('r', 'rev', '', _('revision to display'))],
3299 3303 _('[-r REV]')),
3300 3304 "^merge":
3301 3305 (merge,
3302 3306 [('f', 'force', None, _('force a merge with outstanding changes')),
3303 3307 ('r', 'rev', '', _('revision to merge')),
3304 3308 ('S', 'show', None,
3305 3309 _('review revisions to merge (no merge is performed)'))],
3306 3310 _('[-f] [[-r] REV]')),
3307 3311 "outgoing|out":
3308 3312 (outgoing,
3309 3313 [('f', 'force', None,
3310 3314 _('run even when remote repository is unrelated')),
3311 3315 ('r', 'rev', [],
3312 3316 _('a specific revision up to which you would like to push')),
3313 3317 ('n', 'newest-first', None, _('show newest record first')),
3314 3318 ] + logopts + remoteopts,
3315 3319 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3316 3320 "^parents":
3317 3321 (parents,
3318 3322 [('r', 'rev', '', _('show parents from the specified revision')),
3319 3323 ] + templateopts,
3320 3324 _('hg parents [-r REV] [FILE]')),
3321 3325 "paths": (paths, [], _('[NAME]')),
3322 3326 "^pull":
3323 3327 (pull,
3324 3328 [('u', 'update', None,
3325 3329 _('update to new tip if changesets were pulled')),
3326 3330 ('f', 'force', None,
3327 3331 _('run even when remote repository is unrelated')),
3328 3332 ('r', 'rev', [],
3329 3333 _('a specific revision up to which you would like to pull')),
3330 3334 ] + remoteopts,
3331 3335 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3332 3336 "^push":
3333 3337 (push,
3334 3338 [('f', 'force', None, _('force push')),
3335 3339 ('r', 'rev', [],
3336 3340 _('a specific revision up to which you would like to push')),
3337 3341 ] + remoteopts,
3338 3342 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3339 3343 "recover": (recover, []),
3340 3344 "^remove|rm":
3341 3345 (remove,
3342 3346 [('A', 'after', None, _('record delete for missing files')),
3343 3347 ('f', 'force', None,
3344 3348 _('remove (and delete) file even if added or modified')),
3345 3349 ] + walkopts,
3346 3350 _('[OPTION]... FILE...')),
3347 3351 "rename|mv":
3348 3352 (rename,
3349 3353 [('A', 'after', None, _('record a rename that has already occurred')),
3350 3354 ('f', 'force', None,
3351 3355 _('forcibly copy over an existing managed file')),
3352 3356 ] + walkopts + dryrunopts,
3353 3357 _('[OPTION]... SOURCE... DEST')),
3354 3358 "resolve":
3355 3359 (resolve,
3356 3360 [('a', 'all', None, _('remerge all unresolved files')),
3357 3361 ('l', 'list', None, _('list state of files needing merge')),
3358 3362 ('m', 'mark', None, _('mark files as resolved')),
3359 3363 ('u', 'unmark', None, _('unmark files as resolved'))]
3360 3364 + walkopts,
3361 3365 _('[OPTION]... [FILE]...')),
3362 3366 "revert":
3363 3367 (revert,
3364 3368 [('a', 'all', None, _('revert all changes when no arguments given')),
3365 3369 ('d', 'date', '', _('tipmost revision matching date')),
3366 3370 ('r', 'rev', '', _('revision to revert to')),
3367 3371 ('', 'no-backup', None, _('do not save backup copies of files')),
3368 3372 ] + walkopts + dryrunopts,
3369 3373 _('[OPTION]... [-r REV] [NAME]...')),
3370 3374 "rollback": (rollback, []),
3371 3375 "root": (root, []),
3372 3376 "^serve":
3373 3377 (serve,
3374 3378 [('A', 'accesslog', '', _('name of access log file to write to')),
3375 3379 ('d', 'daemon', None, _('run server in background')),
3376 3380 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3377 3381 ('E', 'errorlog', '', _('name of error log file to write to')),
3378 3382 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3379 3383 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3380 3384 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3381 3385 ('n', 'name', '',
3382 3386 _('name to show in web pages (default: working directory)')),
3383 3387 ('', 'webdir-conf', '', _('name of the webdir config file'
3384 3388 ' (serve more than one repository)')),
3385 3389 ('', 'pid-file', '', _('name of file to write process ID to')),
3386 3390 ('', 'stdio', None, _('for remote clients')),
3387 3391 ('t', 'templates', '', _('web templates to use')),
3388 3392 ('', 'style', '', _('template style to use')),
3389 3393 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3390 3394 ('', 'certificate', '', _('SSL certificate file'))],
3391 3395 _('[OPTION]...')),
3392 3396 "showconfig|debugconfig":
3393 3397 (showconfig,
3394 3398 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3395 3399 _('[-u] [NAME]...')),
3396 3400 "^status|st":
3397 3401 (status,
3398 3402 [('A', 'all', None, _('show status of all files')),
3399 3403 ('m', 'modified', None, _('show only modified files')),
3400 3404 ('a', 'added', None, _('show only added files')),
3401 3405 ('r', 'removed', None, _('show only removed files')),
3402 3406 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3403 3407 ('c', 'clean', None, _('show only files without changes')),
3404 3408 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3405 3409 ('i', 'ignored', None, _('show only ignored files')),
3406 3410 ('n', 'no-status', None, _('hide status prefix')),
3407 3411 ('C', 'copies', None, _('show source of copied files')),
3408 3412 ('0', 'print0', None,
3409 3413 _('end filenames with NUL, for use with xargs')),
3410 3414 ('', 'rev', [], _('show difference from revision')),
3411 3415 ] + walkopts,
3412 3416 _('[OPTION]... [FILE]...')),
3413 3417 "tag":
3414 3418 (tag,
3415 3419 [('f', 'force', None, _('replace existing tag')),
3416 3420 ('l', 'local', None, _('make the tag local')),
3417 3421 ('r', 'rev', '', _('revision to tag')),
3418 3422 ('', 'remove', None, _('remove a tag')),
3419 3423 # -l/--local is already there, commitopts cannot be used
3420 3424 ('m', 'message', '', _('use <text> as commit message')),
3421 3425 ] + commitopts2,
3422 3426 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3423 3427 "tags": (tags, []),
3424 3428 "tip":
3425 3429 (tip,
3426 3430 [('p', 'patch', None, _('show patch')),
3427 3431 ('g', 'git', None, _('use git extended diff format')),
3428 3432 ] + templateopts,
3429 3433 _('[-p]')),
3430 3434 "unbundle":
3431 3435 (unbundle,
3432 3436 [('u', 'update', None,
3433 3437 _('update to new tip if changesets were unbundled'))],
3434 3438 _('[-u] FILE...')),
3435 3439 "^update|up|checkout|co":
3436 3440 (update,
3437 3441 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3438 3442 ('d', 'date', '', _('tipmost revision matching date')),
3439 3443 ('r', 'rev', '', _('revision'))],
3440 3444 _('[-C] [-d DATE] [[-r] REV]')),
3441 3445 "verify": (verify, []),
3442 3446 "version": (version_, []),
3443 3447 }
3444 3448
3445 3449 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3446 3450 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3447 3451 optionalrepo = ("identify paths serve showconfig debugancestor")
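For readers unfamiliar with this table: each key is the command name plus its aliases (a leading '^' marks a command listed in the short help), each value is a (function, options, synopsis) tuple, and every option is a (short flag, long name, default, help text) tuple; the norepo and optionalrepo strings just above refer back to these same names. A rough sketch of the same shape as an extension would declare it in its cmdtable; the command and option here are made up.

    # hypothetical extension command, illustration only
    from mercurial.i18n import _

    def hello(ui, repo, **opts):
        """print a short greeting (illustration only)"""
        ui.write('hello %s from %s\n' % (opts.get('name') or 'world', repo.root))

    cmdtable = {
        # "name|alias": (function, options list, synopsis)
        "hello|hi":
            (hello,
             [('n', 'name', '', _('name to greet'))],
             _('hg hello [-n NAME]')),
    }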
@@ -1,2133 +1,2104 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as match_
15 15 import merge as merge_
16 16 from lock import release
17 17 import weakref, stat, errno, os, time, inspect
18 18 propertycache = util.propertycache
19 19
20 20 class localrepository(repo.repository):
21 21 capabilities = set(('lookup', 'changegroupsubset'))
22 22 supported = set('revlogv1 store fncache'.split())
23 23
24 24 def __init__(self, baseui, path=None, create=0):
25 25 repo.repository.__init__(self)
26 26 self.root = os.path.realpath(path)
27 27 self.path = os.path.join(self.root, ".hg")
28 28 self.origroot = path
29 29 self.opener = util.opener(self.path)
30 30 self.wopener = util.opener(self.root)
31 31
32 32 if not os.path.isdir(self.path):
33 33 if create:
34 34 if not os.path.exists(path):
35 35 os.mkdir(path)
36 36 os.mkdir(self.path)
37 37 requirements = ["revlogv1"]
38 38 if baseui.configbool('format', 'usestore', True):
39 39 os.mkdir(os.path.join(self.path, "store"))
40 40 requirements.append("store")
41 41 if baseui.configbool('format', 'usefncache', True):
42 42 requirements.append("fncache")
43 43 # create an invalid changelog
44 44 self.opener("00changelog.i", "a").write(
45 45 '\0\0\0\2' # represents revlogv2
46 46 ' dummy changelog to prevent using the old repo layout'
47 47 )
48 48 reqfile = self.opener("requires", "w")
49 49 for r in requirements:
50 50 reqfile.write("%s\n" % r)
51 51 reqfile.close()
52 52 else:
53 53 raise error.RepoError(_("repository %s not found") % path)
54 54 elif create:
55 55 raise error.RepoError(_("repository %s already exists") % path)
56 56 else:
57 57 # find requirements
58 58 requirements = set()
59 59 try:
60 60 requirements = set(self.opener("requires").read().splitlines())
61 61 except IOError, inst:
62 62 if inst.errno != errno.ENOENT:
63 63 raise
64 64 for r in requirements - self.supported:
65 65 raise error.RepoError(_("requirement '%s' not supported") % r)
66 66
67 67 self.store = store.store(requirements, self.path, util.opener)
68 68 self.spath = self.store.path
69 69 self.sopener = self.store.opener
70 70 self.sjoin = self.store.join
71 71 self.opener.createmode = self.store.createmode
72 72
73 73 self.baseui = baseui
74 74 self.ui = baseui.copy()
75 75 try:
76 76 self.ui.readconfig(self.join("hgrc"), self.root)
77 77 extensions.loadall(self.ui)
78 78 except IOError:
79 79 pass
80 80
81 81 self.tagscache = None
82 82 self._tagstypecache = None
83 83 self.branchcache = None
84 84 self._ubranchcache = None # UTF-8 version of branchcache
85 85 self._branchcachetip = None
86 86 self.nodetagscache = None
87 87 self.filterpats = {}
88 88 self._datafilters = {}
89 89 self._transref = self._lockref = self._wlockref = None
90 90
91 91 @propertycache
92 92 def changelog(self):
93 93 c = changelog.changelog(self.sopener)
94 94 if 'HG_PENDING' in os.environ:
95 95 p = os.environ['HG_PENDING']
96 96 if p.startswith(self.root):
97 97 c.readpending('00changelog.i.a')
98 98 self.sopener.defversion = c.version
99 99 return c
100 100
101 101 @propertycache
102 102 def manifest(self):
103 103 return manifest.manifest(self.sopener)
104 104
105 105 @propertycache
106 106 def dirstate(self):
107 107 return dirstate.dirstate(self.opener, self.ui, self.root)
108 108
109 109 def __getitem__(self, changeid):
110 110 if changeid == None:
111 111 return context.workingctx(self)
112 112 return context.changectx(self, changeid)
113 113
114 114 def __nonzero__(self):
115 115 return True
116 116
117 117 def __len__(self):
118 118 return len(self.changelog)
119 119
120 120 def __iter__(self):
121 121 for i in xrange(len(self)):
122 122 yield i
123 123
124 124 def url(self):
125 125 return 'file:' + self.root
126 126
127 127 def hook(self, name, throw=False, **args):
128 128 return hook.hook(self.ui, self, name, throw, **args)
129 129
130 130 tag_disallowed = ':\r\n'
131 131
132 132 def _tag(self, names, node, message, local, user, date, extra={}):
133 133 if isinstance(names, str):
134 134 allchars = names
135 135 names = (names,)
136 136 else:
137 137 allchars = ''.join(names)
138 138 for c in self.tag_disallowed:
139 139 if c in allchars:
140 140 raise util.Abort(_('%r cannot be used in a tag name') % c)
141 141
142 142 for name in names:
143 143 self.hook('pretag', throw=True, node=hex(node), tag=name,
144 144 local=local)
145 145
146 146 def writetags(fp, names, munge, prevtags):
147 147 fp.seek(0, 2)
148 148 if prevtags and prevtags[-1] != '\n':
149 149 fp.write('\n')
150 150 for name in names:
151 151 m = munge and munge(name) or name
152 152 if self._tagstypecache and name in self._tagstypecache:
153 153 old = self.tagscache.get(name, nullid)
154 154 fp.write('%s %s\n' % (hex(old), m))
155 155 fp.write('%s %s\n' % (hex(node), m))
156 156 fp.close()
157 157
158 158 prevtags = ''
159 159 if local:
160 160 try:
161 161 fp = self.opener('localtags', 'r+')
162 162 except IOError:
163 163 fp = self.opener('localtags', 'a')
164 164 else:
165 165 prevtags = fp.read()
166 166
167 167 # local tags are stored in the current charset
168 168 writetags(fp, names, None, prevtags)
169 169 for name in names:
170 170 self.hook('tag', node=hex(node), tag=name, local=local)
171 171 return
172 172
173 173 try:
174 174 fp = self.wfile('.hgtags', 'rb+')
175 175 except IOError:
176 176 fp = self.wfile('.hgtags', 'ab')
177 177 else:
178 178 prevtags = fp.read()
179 179
180 180 # committed tags are stored in UTF-8
181 181 writetags(fp, names, encoding.fromlocal, prevtags)
182 182
183 183 if '.hgtags' not in self.dirstate:
184 184 self.add(['.hgtags'])
185 185
186 186 tagnode = self.commit(['.hgtags'], message, user, date, extra=extra)
187 187
188 188 for name in names:
189 189 self.hook('tag', node=hex(node), tag=name, local=local)
190 190
191 191 return tagnode
192 192
193 193 def tag(self, names, node, message, local, user, date):
194 194 '''tag a revision with one or more symbolic names.
195 195
196 196 names is a list of strings or, when adding a single tag, names may be a
197 197 string.
198 198
199 199 if local is True, the tags are stored in a per-repository file.
200 200 otherwise, they are stored in the .hgtags file, and a new
201 201 changeset is committed with the change.
202 202
203 203 keyword arguments:
204 204
205 205 local: whether to store tags in non-version-controlled file
206 206 (default False)
207 207
208 208 message: commit message to use if committing
209 209
210 210 user: name of user to use if committing
211 211
212 212 date: date tuple to use if committing'''
213 213
214 214 for x in self.status()[:5]:
215 215 if '.hgtags' in x:
216 216 raise util.Abort(_('working copy of .hgtags is changed '
217 217 '(please commit .hgtags manually)'))
218 218
219 219 self.tags() # instantiate the cache
220 220 self._tag(names, node, message, local, user, date)
221 221
222 222 def tags(self):
223 223 '''return a mapping of tag to node'''
224 224 if self.tagscache:
225 225 return self.tagscache
226 226
227 227 globaltags = {}
228 228 tagtypes = {}
229 229
230 230 def readtags(lines, fn, tagtype):
231 231 filetags = {}
232 232 count = 0
233 233
234 234 def warn(msg):
235 235 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
236 236
237 237 for l in lines:
238 238 count += 1
239 239 if not l:
240 240 continue
241 241 s = l.split(" ", 1)
242 242 if len(s) != 2:
243 243 warn(_("cannot parse entry"))
244 244 continue
245 245 node, key = s
246 246 key = encoding.tolocal(key.strip()) # stored in UTF-8
247 247 try:
248 248 bin_n = bin(node)
249 249 except TypeError:
250 250 warn(_("node '%s' is not well formed") % node)
251 251 continue
252 252 if bin_n not in self.changelog.nodemap:
253 253 warn(_("tag '%s' refers to unknown node") % key)
254 254 continue
255 255
256 256 h = []
257 257 if key in filetags:
258 258 n, h = filetags[key]
259 259 h.append(n)
260 260 filetags[key] = (bin_n, h)
261 261
262 262 for k, nh in filetags.iteritems():
263 263 if k not in globaltags:
264 264 globaltags[k] = nh
265 265 tagtypes[k] = tagtype
266 266 continue
267 267
268 268 # we prefer the global tag if:
269 269 # it supersedes us OR
270 270 # mutual supersedes and it has a higher rank
271 271 # otherwise we win because we're tip-most
272 272 an, ah = nh
273 273 bn, bh = globaltags[k]
274 274 if (bn != an and an in bh and
275 275 (bn not in ah or len(bh) > len(ah))):
276 276 an = bn
277 277 ah.extend([n for n in bh if n not in ah])
278 278 globaltags[k] = an, ah
279 279 tagtypes[k] = tagtype
280 280
281 281 # read the tags file from each head, ending with the tip
282 282 f = None
283 283 for rev, node, fnode in self._hgtagsnodes():
284 284 f = (f and f.filectx(fnode) or
285 285 self.filectx('.hgtags', fileid=fnode))
286 286 readtags(f.data().splitlines(), f, "global")
287 287
288 288 try:
289 289 data = encoding.fromlocal(self.opener("localtags").read())
290 290 # localtags are stored in the local character set
291 291 # while the internal tag table is stored in UTF-8
292 292 readtags(data.splitlines(), "localtags", "local")
293 293 except IOError:
294 294 pass
295 295
296 296 self.tagscache = {}
297 297 self._tagstypecache = {}
298 298 for k, nh in globaltags.iteritems():
299 299 n = nh[0]
300 300 if n != nullid:
301 301 self.tagscache[k] = n
302 302 self._tagstypecache[k] = tagtypes[k]
303 303 self.tagscache['tip'] = self.changelog.tip()
304 304 return self.tagscache
305 305
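readtags above parses one very small format: each .hgtags (or localtags) line is a 40-character hex node, one space, then the tag name; later entries for the same name override earlier ones, and an entry pointing at the null node is how a removed tag gets filtered out when tagscache is built. A tiny sketch of the parse step, with a made-up line:

    # minimal sketch of the per-line parse done by readtags
    line = '0123456789abcdef0123456789abcdef01234567 v0.1'
    node, key = line.split(' ', 1)      # the same split readtags uses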
306 306 def tagtype(self, tagname):
307 307 '''
308 308 return the type of the given tag. result can be:
309 309
310 310 'local' : a local tag
311 311 'global' : a global tag
312 312 None : tag does not exist
313 313 '''
314 314
315 315 self.tags()
316 316
317 317 return self._tagstypecache.get(tagname)
318 318
319 319 def _hgtagsnodes(self):
320 320 last = {}
321 321 ret = []
322 322 for node in reversed(self.heads()):
323 323 c = self[node]
324 324 rev = c.rev()
325 325 try:
326 326 fnode = c.filenode('.hgtags')
327 327 except error.LookupError:
328 328 continue
329 329 ret.append((rev, node, fnode))
330 330 if fnode in last:
331 331 ret[last[fnode]] = None
332 332 last[fnode] = len(ret) - 1
333 333 return [item for item in ret if item]
334 334
335 335 def tagslist(self):
336 336 '''return a list of tags ordered by revision'''
337 337 l = []
338 338 for t, n in self.tags().iteritems():
339 339 try:
340 340 r = self.changelog.rev(n)
341 341 except:
342 342 r = -2 # sort to the beginning of the list if unknown
343 343 l.append((r, t, n))
344 344 return [(t, n) for r, t, n in sorted(l)]
345 345
346 346 def nodetags(self, node):
347 347 '''return the tags associated with a node'''
348 348 if not self.nodetagscache:
349 349 self.nodetagscache = {}
350 350 for t, n in self.tags().iteritems():
351 351 self.nodetagscache.setdefault(n, []).append(t)
352 352 return self.nodetagscache.get(node, [])
353 353
354 354 def _branchtags(self, partial, lrev):
355 355 # TODO: rename this function?
356 356 tiprev = len(self) - 1
357 357 if lrev != tiprev:
358 358 self._updatebranchcache(partial, lrev+1, tiprev+1)
359 359 self._writebranchcache(partial, self.changelog.tip(), tiprev)
360 360
361 361 return partial
362 362
363 363 def _branchheads(self):
364 364 tip = self.changelog.tip()
365 365 if self.branchcache is not None and self._branchcachetip == tip:
366 366 return self.branchcache
367 367
368 368 oldtip = self._branchcachetip
369 369 self._branchcachetip = tip
370 370 if self.branchcache is None:
371 371 self.branchcache = {} # avoid recursion in changectx
372 372 else:
373 373 self.branchcache.clear() # keep using the same dict
374 374 if oldtip is None or oldtip not in self.changelog.nodemap:
375 375 partial, last, lrev = self._readbranchcache()
376 376 else:
377 377 lrev = self.changelog.rev(oldtip)
378 378 partial = self._ubranchcache
379 379
380 380 self._branchtags(partial, lrev)
381 381 # this private cache holds all heads (not just tips)
382 382 self._ubranchcache = partial
383 383
384 384 # the branch cache is stored on disk as UTF-8, but in the local
385 385 # charset internally
386 386 for k, v in partial.iteritems():
387 387 self.branchcache[encoding.tolocal(k)] = v
388 388 return self.branchcache
389 389
390 390
391 391 def branchtags(self):
392 392 '''return a dict where branch names map to the tipmost head of
393 393 the branch, open heads come before closed'''
394 394 bt = {}
395 395 for bn, heads in self._branchheads().iteritems():
396 396 head = None
397 397 for i in range(len(heads)-1, -1, -1):
398 398 h = heads[i]
399 399 if 'close' not in self.changelog.read(h)[5]:
400 400 head = h
401 401 break
402 402 # no open heads were found
403 403 if head is None:
404 404 head = heads[-1]
405 405 bt[bn] = head
406 406 return bt
407 407
408 408
409 409 def _readbranchcache(self):
410 410 partial = {}
411 411 try:
412 412 f = self.opener("branchheads.cache")
413 413 lines = f.read().split('\n')
414 414 f.close()
415 415 except (IOError, OSError):
416 416 return {}, nullid, nullrev
417 417
418 418 try:
419 419 last, lrev = lines.pop(0).split(" ", 1)
420 420 last, lrev = bin(last), int(lrev)
421 421 if lrev >= len(self) or self[lrev].node() != last:
422 422 # invalidate the cache
423 423 raise ValueError('invalidating branch cache (tip differs)')
424 424 for l in lines:
425 425 if not l: continue
426 426 node, label = l.split(" ", 1)
427 427 partial.setdefault(label.strip(), []).append(bin(node))
428 428 except KeyboardInterrupt:
429 429 raise
430 430 except Exception, inst:
431 431 if self.ui.debugflag:
432 432 self.ui.warn(str(inst), '\n')
433 433 partial, last, lrev = {}, nullid, nullrev
434 434 return partial, last, lrev
435 435
436 436 def _writebranchcache(self, branches, tip, tiprev):
437 437 try:
438 438 f = self.opener("branchheads.cache", "w", atomictemp=True)
439 439 f.write("%s %s\n" % (hex(tip), tiprev))
440 440 for label, nodes in branches.iteritems():
441 441 for node in nodes:
442 442 f.write("%s %s\n" % (hex(node), label))
443 443 f.rename()
444 444 except (IOError, OSError):
445 445 pass
446 446
447 447 def _updatebranchcache(self, partial, start, end):
448 448 for r in xrange(start, end):
449 449 c = self[r]
450 450 b = c.branch()
451 451 bheads = partial.setdefault(b, [])
452 452 bheads.append(c.node())
453 453 for p in c.parents():
454 454 pn = p.node()
455 455 if pn in bheads:
456 456 bheads.remove(pn)
457 457
458 458 def lookup(self, key):
459 459 if isinstance(key, int):
460 460 return self.changelog.node(key)
461 461 elif key == '.':
462 462 return self.dirstate.parents()[0]
463 463 elif key == 'null':
464 464 return nullid
465 465 elif key == 'tip':
466 466 return self.changelog.tip()
467 467 n = self.changelog._match(key)
468 468 if n:
469 469 return n
470 470 if key in self.tags():
471 471 return self.tags()[key]
472 472 if key in self.branchtags():
473 473 return self.branchtags()[key]
474 474 n = self.changelog._partialmatch(key)
475 475 if n:
476 476 return n
477 477 try:
478 478 if len(key) == 20:
479 479 key = hex(key)
480 480 except:
481 481 pass
482 482 raise error.RepoError(_("unknown revision '%s'") % key)
483 483
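lookup tries its cases in a fixed order: integer revision, the special names '.', 'null' and 'tip', an exact changelog match, tags, branch names, and finally a unique node-id prefix, aborting only when all of them fail. A rough sketch, assuming a non-empty repository at a made-up path in which a 'v0.1' tag and a 'default' branch exist:

    # minimal sketch; every key resolves through the same lookup() method
    from mercurial import ui as uimod, hg
    from mercurial.node import hex

    u = uimod.ui()
    repo = hg.repository(u, '/path/to/repo')        # hypothetical path
    for key in (0, '.', 'tip', 'v0.1', 'default'):  # assumed to exist in this repo
        u.write('%s -> %s\n' % (key, hex(repo.lookup(key))))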
484 484 def local(self):
485 485 return True
486 486
487 487 def join(self, f):
488 488 return os.path.join(self.path, f)
489 489
490 490 def wjoin(self, f):
491 491 return os.path.join(self.root, f)
492 492
493 493 def rjoin(self, f):
494 494 return os.path.join(self.root, util.pconvert(f))
495 495
496 496 def file(self, f):
497 497 if f[0] == '/':
498 498 f = f[1:]
499 499 return filelog.filelog(self.sopener, f)
500 500
501 501 def changectx(self, changeid):
502 502 return self[changeid]
503 503
504 504 def parents(self, changeid=None):
505 505 '''get list of changectxs for parents of changeid'''
506 506 return self[changeid].parents()
507 507
508 508 def filectx(self, path, changeid=None, fileid=None):
509 509 """changeid can be a changeset revision, node, or tag.
510 510 fileid can be a file revision or node."""
511 511 return context.filectx(self, path, changeid, fileid)
512 512
513 513 def getcwd(self):
514 514 return self.dirstate.getcwd()
515 515
516 516 def pathto(self, f, cwd=None):
517 517 return self.dirstate.pathto(f, cwd)
518 518
519 519 def wfile(self, f, mode='r'):
520 520 return self.wopener(f, mode)
521 521
522 522 def _link(self, f):
523 523 return os.path.islink(self.wjoin(f))
524 524
525 525 def _filter(self, filter, filename, data):
526 526 if filter not in self.filterpats:
527 527 l = []
528 528 for pat, cmd in self.ui.configitems(filter):
529 529 if cmd == '!':
530 530 continue
531 531 mf = util.matcher(self.root, "", [pat], [], [])[1]
532 532 fn = None
533 533 params = cmd
534 534 for name, filterfn in self._datafilters.iteritems():
535 535 if cmd.startswith(name):
536 536 fn = filterfn
537 537 params = cmd[len(name):].lstrip()
538 538 break
539 539 if not fn:
540 540 fn = lambda s, c, **kwargs: util.filter(s, c)
541 541 # Wrap old filters not supporting keyword arguments
542 542 if not inspect.getargspec(fn)[2]:
543 543 oldfn = fn
544 544 fn = lambda s, c, **kwargs: oldfn(s, c)
545 545 l.append((mf, fn, params))
546 546 self.filterpats[filter] = l
547 547
548 548 for mf, fn, cmd in self.filterpats[filter]:
549 549 if mf(filename):
550 550 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
551 551 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
552 552 break
553 553
554 554 return data
555 555
556 556 def adddatafilter(self, name, filter):
557 557 self._datafilters[name] = filter
558 558
559 559 def wread(self, filename):
560 560 if self._link(filename):
561 561 data = os.readlink(self.wjoin(filename))
562 562 else:
563 563 data = self.wopener(filename, 'r').read()
564 564 return self._filter("encode", filename, data)
565 565
566 566 def wwrite(self, filename, data, flags):
567 567 data = self._filter("decode", filename, data)
568 568 try:
569 569 os.unlink(self.wjoin(filename))
570 570 except OSError:
571 571 pass
572 572 if 'l' in flags:
573 573 self.wopener.symlink(data, filename)
574 574 else:
575 575 self.wopener(filename, 'w').write(data)
576 576 if 'x' in flags:
577 577 util.set_flags(self.wjoin(filename), False, True)
578 578
579 579 def wwritedata(self, filename, data):
580 580 return self._filter("decode", filename, data)
581 581
582 582 def transaction(self):
583 583 tr = self._transref and self._transref() or None
584 584 if tr and tr.running():
585 585 return tr.nest()
586 586
587 587 # abort here if the journal already exists
588 588 if os.path.exists(self.sjoin("journal")):
589 589 raise error.RepoError(_("journal already exists - run hg recover"))
590 590
591 591 # save dirstate for rollback
592 592 try:
593 593 ds = self.opener("dirstate").read()
594 594 except IOError:
595 595 ds = ""
596 596 self.opener("journal.dirstate", "w").write(ds)
597 597 self.opener("journal.branch", "w").write(self.dirstate.branch())
598 598
599 599 renames = [(self.sjoin("journal"), self.sjoin("undo")),
600 600 (self.join("journal.dirstate"), self.join("undo.dirstate")),
601 601 (self.join("journal.branch"), self.join("undo.branch"))]
602 602 tr = transaction.transaction(self.ui.warn, self.sopener,
603 603 self.sjoin("journal"),
604 604 aftertrans(renames),
605 605 self.store.createmode)
606 606 self._transref = weakref.ref(tr)
607 607 return tr
608 608
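The renames list above is what makes rollback possible: while a transaction is open, the pending state lives in the journal files, and aftertrans() renames them to their undo counterparts once the transaction completes, which is exactly what rollback() below reads back. A sketch of the resulting on-disk pairs, assuming the default 'store' layout; the exact store path depends on the repository's requirements.

    # journal file (during the transaction)  ->  undo file (after it completes)
    pairs = [('.hg/store/journal',    '.hg/store/undo'),
             ('.hg/journal.dirstate', '.hg/undo.dirstate'),
             ('.hg/journal.branch',   '.hg/undo.branch')]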
609 609 def recover(self):
610 610 lock = self.lock()
611 611 try:
612 612 if os.path.exists(self.sjoin("journal")):
613 613 self.ui.status(_("rolling back interrupted transaction\n"))
614 614 transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
615 615 self.invalidate()
616 616 return True
617 617 else:
618 618 self.ui.warn(_("no interrupted transaction available\n"))
619 619 return False
620 620 finally:
621 621 lock.release()
622 622
623 623 def rollback(self):
624 624 wlock = lock = None
625 625 try:
626 626 wlock = self.wlock()
627 627 lock = self.lock()
628 628 if os.path.exists(self.sjoin("undo")):
629 629 self.ui.status(_("rolling back last transaction\n"))
630 630 transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
631 631 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
632 632 try:
633 633 branch = self.opener("undo.branch").read()
634 634 self.dirstate.setbranch(branch)
635 635 except IOError:
636 636 self.ui.warn(_("Named branch could not be reset, "
637 637 "current branch still is: %s\n")
638 638 % encoding.tolocal(self.dirstate.branch()))
639 639 self.invalidate()
640 640 self.dirstate.invalidate()
641 641 else:
642 642 self.ui.warn(_("no rollback information available\n"))
643 643 finally:
644 644 release(lock, wlock)
645 645
646 646 def invalidate(self):
647 647 for a in "changelog manifest".split():
648 648 if a in self.__dict__:
649 649 delattr(self, a)
650 650 self.tagscache = None
651 651 self._tagstypecache = None
652 652 self.nodetagscache = None
653 653 self.branchcache = None
654 654 self._ubranchcache = None
655 655 self._branchcachetip = None
656 656
657 657 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
658 658 try:
659 659 l = lock.lock(lockname, 0, releasefn, desc=desc)
660 660 except error.LockHeld, inst:
661 661 if not wait:
662 662 raise
663 663 self.ui.warn(_("waiting for lock on %s held by %r\n") %
664 664 (desc, inst.locker))
665 665 # default to 600 seconds timeout
666 666 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
667 667 releasefn, desc=desc)
668 668 if acquirefn:
669 669 acquirefn()
670 670 return l
671 671
672 672 def lock(self, wait=True):
673 673 l = self._lockref and self._lockref()
674 674 if l is not None and l.held:
675 675 l.lock()
676 676 return l
677 677
678 678 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
679 679 _('repository %s') % self.origroot)
680 680 self._lockref = weakref.ref(l)
681 681 return l
682 682
683 683 def wlock(self, wait=True):
684 684 l = self._wlockref and self._wlockref()
685 685 if l is not None and l.held:
686 686 l.lock()
687 687 return l
688 688
689 689 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
690 690 self.dirstate.invalidate, _('working directory of %s') %
691 691 self.origroot)
692 692 self._wlockref = weakref.ref(l)
693 693 return l
694 694
695 695 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
696 696 """
697 697 commit an individual file as part of a larger transaction
698 698 """
699 699
700 700 fname = fctx.path()
701 701 text = fctx.data()
702 702 flog = self.file(fname)
703 703 fparent1 = manifest1.get(fname, nullid)
704 704 fparent2 = fparent2o = manifest2.get(fname, nullid)
705 705
706 706 meta = {}
707 707 copy = fctx.renamed()
708 708 if copy and copy[0] != fname:
709 709 # Mark the new revision of this file as a copy of another
710 710 # file. This copy data will effectively act as a parent
711 711 # of this new revision. If this is a merge, the first
712 712 # parent will be the nullid (meaning "look up the copy data")
713 713 # and the second one will be the other parent. For example:
714 714 #
715 715 # 0 --- 1 --- 3 rev1 changes file foo
716 716 # \ / rev2 renames foo to bar and changes it
717 717 # \- 2 -/ rev3 should have bar with all changes and
718 718 # should record that bar descends from
719 719 # bar in rev2 and foo in rev1
720 720 #
721 721 # this allows this merge to succeed:
722 722 #
723 723 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
724 724 # \ / merging rev3 and rev4 should use bar@rev2
725 725 # \- 2 --- 4 as the merge base
726 726 #
727 727
728 728 cfname = copy[0]
729 729 crev = manifest1.get(cfname)
730 730 newfparent = fparent2
731 731
732 732 if manifest2: # branch merge
733 733 if fparent2 == nullid or crev is None: # copied on remote side
734 734 if cfname in manifest2:
735 735 crev = manifest2[cfname]
736 736 newfparent = fparent1
737 737
738 738 # find source in nearest ancestor if we've lost track
739 739 if not crev:
740 740 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
741 741 (fname, cfname))
742 742 for ancestor in self['.'].ancestors():
743 743 if cfname in ancestor:
744 744 crev = ancestor[cfname].filenode()
745 745 break
746 746
747 747 self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
748 748 meta["copy"] = cfname
749 749 meta["copyrev"] = hex(crev)
750 750 fparent1, fparent2 = nullid, newfparent
751 751 elif fparent2 != nullid:
752 752 # is one parent an ancestor of the other?
753 753 fparentancestor = flog.ancestor(fparent1, fparent2)
754 754 if fparentancestor == fparent1:
755 755 fparent1, fparent2 = fparent2, nullid
756 756 elif fparentancestor == fparent2:
757 757 fparent2 = nullid
758 758
759 759 # is the file changed?
760 760 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
761 761 changelist.append(fname)
762 762 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
763 763
764 764 # are just the flags changed during merge?
765 765 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
766 766 changelist.append(fname)
767 767
768 768 return fparent1
769 769
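When _filecommit() records a rename, the copy source does not occupy a regular parent slot; it is stored as filelog metadata, and the first parent is set to nullid so readers know to consult that metadata. A sketch of what gets written for the renamed-file example in the comment above (the filenames and the hex revision are hypothetical):

    # what meta looks like just before flog.add(text, meta, ...) for a rename
    meta = {
        "copy": "foo",           # path the file was copied/renamed from
        "copyrev": "c0ffee...",  # hex filenode of "foo" in the copy source (placeholder)
    }
    # and the parents passed to flog.add() become (nullid, newfparent)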
770 770 def commit(self, files=None, text="", user=None, date=None, match=None,
771 force=False, force_editor=False, extra={}, empty_ok=False):
771 force=False, editor=False, extra={}):
772 772 wlock = lock = None
773 773 if extra.get("close"):
774 774 force = True
775 775 if files:
776 776 files = list(set(files))
777 777
778 778 wlock = self.wlock()
779 779 try:
780 780 p1, p2 = self.dirstate.parents()
781 781
782 782 if (not force and p2 != nullid and
783 783 (match and (match.files() or match.anypats()))):
784 784 raise util.Abort(_('cannot partially commit a merge '
785 785 '(do not specify files or patterns)'))
786 786
787 787 if files:
788 788 modified, removed = [], []
789 789 for f in files:
790 790 s = self.dirstate[f]
791 791 if s in 'nma':
792 792 modified.append(f)
793 793 elif s == 'r':
794 794 removed.append(f)
795 795 else:
796 796 self.ui.warn(_("%s not tracked!\n") % f)
797 797 changes = [modified, [], removed, [], []]
798 798 else:
799 799 changes = self.status(match=match)
800 800
801 801 if (not (changes[0] or changes[1] or changes[2])
802 802 and not force and p2 == nullid and
803 803 self[None].branch() == self['.'].branch()):
804 804 self.ui.status(_("nothing changed\n"))
805 805 return None
806 806
807 807 ms = merge_.mergestate(self)
808 808 for f in changes[0]:
809 809 if f in ms and ms[f] == 'u':
810 810 raise util.Abort(_("unresolved merge conflicts "
811 811 "(see hg resolve)"))
812 812 wctx = context.workingctx(self, (p1, p2), text, user, date,
813 813 extra, changes)
814 r = self._commitctx(wctx, force, force_editor, empty_ok, True)
814 r = self._commitctx(wctx, force, editor, True)
815 815 ms.reset()
816 816 return r
817 817
818 818 finally:
819 819 wlock.release()
820 820
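With this change commit() no longer builds the editor text itself; it simply forwards the editor callable into _commitctx(), which invokes it as editor(self, ctx, added, updated, removed) and uses the returned string as the commit message. A minimal sketch of such a callable, assuming a caller supplies its own rather than the one cmdutil now provides (the function name is illustrative only):

    def simple_editor(repo, ctx, added, updated, removed):
        # Build a commit message from the pending change. A real editor
        # would typically call repo.ui.edit() with the usual "HG:" template.
        lines = [ctx.description() or "automated commit", ""]
        lines += ["added %s" % f for f in added]
        lines += ["changed %s" % f for f in updated]
        lines += ["removed %s" % f for f in removed]
        return "\n".join(lines)

    # passed straight through commit() into _commitctx():
    # node = repo.commit(editor=simple_editor)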
821 821 def commitctx(self, ctx):
822 822 """Add a new revision to current repository.
823 823
824 824 Revision information is passed in the context.memctx argument.
825 825 commitctx() does not touch the working directory.
826 826 """
827 return self._commitctx(ctx, force=True, force_editor=False,
828 empty_ok=True, working=False)
827 return self._commitctx(ctx, force=True, editor=None, working=False)
829 828
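commitctx() takes a context.memctx and never touches the working directory, which makes it useful for synthesizing revisions (the convert extension works this way). A rough usage sketch, assuming the memctx/memfilectx signatures in this tree's context.py; the exact argument order should be double-checked there:

    from mercurial import context

    def getfilectx(repo, memctx, path):
        # called back for every file listed in the memctx
        return context.memfilectx(path, "new contents\n",
                                   islink=False, isexec=False, copied=None)

    mctx = context.memctx(repo, (repo['.'].node(), None),  # parents (None -> nullid)
                          "synthesized commit",             # commit message
                          ["a.txt"],                        # files in this revision
                          getfilectx, user="someone", date=None, extra=None)
    node = repo.commitctx(mctx)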
830 def _commitctx(self, ctx, force=False, force_editor=False, empty_ok=False,
831 working=True):
829 def _commitctx(self, ctx, force=False, editor=None, working=True):
832 830 lock = self.lock()
833 831 tr = None
834 832 valid = 0 # don't save the dirstate if this isn't set
835 833 try:
836 834 commit = sorted(ctx.modified() + ctx.added())
837 835 remove = ctx.removed()
838 836 extra = ctx.extra().copy()
839 837 branchname = extra['branch']
840 838 user = ctx.user()
841 839 text = ctx.description()
842 840
843 841 p1, p2 = [p.node() for p in ctx.parents()]
844 842 c1 = self.changelog.read(p1)
845 843 c2 = self.changelog.read(p2)
846 844 m1 = self.manifest.read(c1[0]).copy()
847 845 m2 = self.manifest.read(c2[0])
848 846
849 847 xp1 = hex(p1)
850 848 if p2 == nullid: xp2 = ''
851 849 else: xp2 = hex(p2)
852 850
853 851 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
854 852
855 853 tr = self.transaction()
856 854 trp = weakref.proxy(tr)
857 855
858 856 # check in files
859 857 new = {}
860 858 changed = []
861 859 linkrev = len(self)
862 860 for f in commit:
863 861 self.ui.note(f + "\n")
864 862 try:
865 863 fctx = ctx[f]
866 864 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
867 865 changed)
868 866 m1.set(f, fctx.flags())
869 867 if working:
870 868 self.dirstate.normal(f)
871 869
872 870 except (OSError, IOError):
873 871 if working:
874 872 self.ui.warn(_("trouble committing %s!\n") % f)
875 873 raise
876 874 else:
877 875 remove.append(f)
878 876
879 877 updated, added = [], []
880 878 for f in sorted(changed):
881 879 if f in m1 or f in m2:
882 880 updated.append(f)
883 881 else:
884 882 added.append(f)
885 883
886 884 # update manifest
887 885 m1.update(new)
888 886 removed = [f for f in sorted(remove) if f in m1 or f in m2]
889 887 removed1 = []
890 888
891 889 for f in removed:
892 890 if f in m1:
893 891 del m1[f]
894 892 removed1.append(f)
895 893 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
896 894 (new, removed1))
897 895
898 # add changeset
899 if (not empty_ok and not text) or force_editor:
900 edittext = []
901 if text:
902 edittext.append(text)
903 edittext.append("")
904 edittext.append("") # Empty line between message and comments.
905 edittext.append(_("HG: Enter commit message."
906 " Lines beginning with 'HG:' are removed."))
907 edittext.append("HG: --")
908 edittext.append(_("HG: user: %s") % user)
909 if p2 != nullid:
910 edittext.append(_("HG: branch merge"))
911 if branchname:
912 edittext.append(_("HG: branch '%s'")
913 % encoding.tolocal(branchname))
914 edittext.extend([_("HG: added %s") % f for f in added])
915 edittext.extend([_("HG: changed %s") % f for f in updated])
916 edittext.extend([_("HG: removed %s") % f for f in removed])
917 if not added and not updated and not removed:
918 edittext.append(_("HG: no files changed"))
919 edittext.append("")
920 # run editor in the repository root
921 olddir = os.getcwd()
922 os.chdir(self.root)
923 text = self.ui.edit("\n".join(edittext), user)
924 os.chdir(olddir)
896 if editor:
897 text = editor(self, ctx, added, updated, removed)
925 898
926 899 lines = [line.rstrip() for line in text.rstrip().splitlines()]
927 900 while lines and not lines[0]:
928 901 del lines[0]
929 if not lines and working:
930 raise util.Abort(_("empty commit message"))
931 902 text = '\n'.join(lines)
932 903
933 904 self.changelog.delayupdate()
934 905 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
935 906 user, ctx.date(), extra)
936 907 p = lambda: self.changelog.writepending() and self.root or ""
937 908 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
938 909 parent2=xp2, pending=p)
939 910 self.changelog.finalize(trp)
940 911 tr.close()
941 912
942 913 if self.branchcache:
943 914 self.branchtags()
944 915
945 916 if working:
946 917 self.dirstate.setparents(n)
947 918 for f in removed:
948 919 self.dirstate.forget(f)
949 920 valid = 1 # our dirstate updates are complete
950 921
951 922 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
952 923 return n
953 924 finally:
954 925 if not valid: # don't save our updated dirstate
955 926 self.dirstate.invalidate()
956 927 del tr
957 928 lock.release()
958 929
959 930 def walk(self, match, node=None):
960 931 '''
961 932 walk recursively through the directory tree or a given
962 933 changeset, finding all files matched by the match
963 934 function
964 935 '''
965 936 return self[node].walk(match)
966 937
967 938 def status(self, node1='.', node2=None, match=None,
968 939 ignored=False, clean=False, unknown=False):
969 940 """return status of files between two nodes or node and working directory
970 941
971 942 If node1 is None, use the first dirstate parent instead.
972 943 If node2 is None, compare node1 with working directory.
973 944 """
974 945
975 946 def mfmatches(ctx):
976 947 mf = ctx.manifest().copy()
977 948 for fn in mf.keys():
978 949 if not match(fn):
979 950 del mf[fn]
980 951 return mf
981 952
982 953 if isinstance(node1, context.changectx):
983 954 ctx1 = node1
984 955 else:
985 956 ctx1 = self[node1]
986 957 if isinstance(node2, context.changectx):
987 958 ctx2 = node2
988 959 else:
989 960 ctx2 = self[node2]
990 961
991 962 working = ctx2.rev() is None
992 963 parentworking = working and ctx1 == self['.']
993 964 match = match or match_.always(self.root, self.getcwd())
994 965 listignored, listclean, listunknown = ignored, clean, unknown
995 966
996 967 # load earliest manifest first for caching reasons
997 968 if not working and ctx2.rev() < ctx1.rev():
998 969 ctx2.manifest()
999 970
1000 971 if not parentworking:
1001 972 def bad(f, msg):
1002 973 if f not in ctx1:
1003 974 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1004 975 return False
1005 976 match.bad = bad
1006 977
1007 978 if working: # we need to scan the working dir
1008 979 s = self.dirstate.status(match, listignored, listclean, listunknown)
1009 980 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1010 981
1011 982 # check for any possibly clean files
1012 983 if parentworking and cmp:
1013 984 fixup = []
1014 985 # do a full compare of any files that might have changed
1015 986 for f in sorted(cmp):
1016 987 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1017 988 or ctx1[f].cmp(ctx2[f].data())):
1018 989 modified.append(f)
1019 990 else:
1020 991 fixup.append(f)
1021 992
1022 993 if listclean:
1023 994 clean += fixup
1024 995
1025 996 # update dirstate for files that are actually clean
1026 997 if fixup:
1027 998 wlock = None
1028 999 try:
1029 1000 try:
1030 1001 # updating the dirstate is optional
1031 1002 # so we don't wait on the lock
1032 1003 wlock = self.wlock(False)
1033 1004 for f in fixup:
1034 1005 self.dirstate.normal(f)
1035 1006 except error.LockError:
1036 1007 pass
1037 1008 finally:
1038 1009 release(wlock)
1039 1010
1040 1011 if not parentworking:
1041 1012 mf1 = mfmatches(ctx1)
1042 1013 if working:
1043 1014 # we are comparing working dir against non-parent
1044 1015 # generate a pseudo-manifest for the working dir
1045 1016 mf2 = mfmatches(self['.'])
1046 1017 for f in cmp + modified + added:
1047 1018 mf2[f] = None
1048 1019 mf2.set(f, ctx2.flags(f))
1049 1020 for f in removed:
1050 1021 if f in mf2:
1051 1022 del mf2[f]
1052 1023 else:
1053 1024 # we are comparing two revisions
1054 1025 deleted, unknown, ignored = [], [], []
1055 1026 mf2 = mfmatches(ctx2)
1056 1027
1057 1028 modified, added, clean = [], [], []
1058 1029 for fn in mf2:
1059 1030 if fn in mf1:
1060 1031 if (mf1.flags(fn) != mf2.flags(fn) or
1061 1032 (mf1[fn] != mf2[fn] and
1062 1033 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1063 1034 modified.append(fn)
1064 1035 elif listclean:
1065 1036 clean.append(fn)
1066 1037 del mf1[fn]
1067 1038 else:
1068 1039 added.append(fn)
1069 1040 removed = mf1.keys()
1070 1041
1071 1042 r = modified, added, removed, deleted, unknown, ignored, clean
1072 1043 [l.sort() for l in r]
1073 1044 return r
1074 1045
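The seven lists are always returned in the same order, matching the assignment to r above; the ignored, clean and unknown lists stay empty unless explicitly requested. A small caller-side sketch:

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(unknown=True, ignored=True, clean=True)
    for f in modified:
        print "M %s" % f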
1075 1046 def add(self, list):
1076 1047 wlock = self.wlock()
1077 1048 try:
1078 1049 rejected = []
1079 1050 for f in list:
1080 1051 p = self.wjoin(f)
1081 1052 try:
1082 1053 st = os.lstat(p)
1083 1054 except:
1084 1055 self.ui.warn(_("%s does not exist!\n") % f)
1085 1056 rejected.append(f)
1086 1057 continue
1087 1058 if st.st_size > 10000000:
1088 1059 self.ui.warn(_("%s: files over 10MB may cause memory and"
1089 1060 " performance problems\n"
1090 1061 "(use 'hg revert %s' to unadd the file)\n")
1091 1062 % (f, f))
1092 1063 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1093 1064 self.ui.warn(_("%s not added: only files and symlinks "
1094 1065 "supported currently\n") % f)
1095 1066 rejected.append(p)
1096 1067 elif self.dirstate[f] in 'amn':
1097 1068 self.ui.warn(_("%s already tracked!\n") % f)
1098 1069 elif self.dirstate[f] == 'r':
1099 1070 self.dirstate.normallookup(f)
1100 1071 else:
1101 1072 self.dirstate.add(f)
1102 1073 return rejected
1103 1074 finally:
1104 1075 wlock.release()
1105 1076
1106 1077 def forget(self, list):
1107 1078 wlock = self.wlock()
1108 1079 try:
1109 1080 for f in list:
1110 1081 if self.dirstate[f] != 'a':
1111 1082 self.ui.warn(_("%s not added!\n") % f)
1112 1083 else:
1113 1084 self.dirstate.forget(f)
1114 1085 finally:
1115 1086 wlock.release()
1116 1087
1117 1088 def remove(self, list, unlink=False):
1118 1089 wlock = None
1119 1090 try:
1120 1091 if unlink:
1121 1092 for f in list:
1122 1093 try:
1123 1094 util.unlink(self.wjoin(f))
1124 1095 except OSError, inst:
1125 1096 if inst.errno != errno.ENOENT:
1126 1097 raise
1127 1098 wlock = self.wlock()
1128 1099 for f in list:
1129 1100 if unlink and os.path.exists(self.wjoin(f)):
1130 1101 self.ui.warn(_("%s still exists!\n") % f)
1131 1102 elif self.dirstate[f] == 'a':
1132 1103 self.dirstate.forget(f)
1133 1104 elif f not in self.dirstate:
1134 1105 self.ui.warn(_("%s not tracked!\n") % f)
1135 1106 else:
1136 1107 self.dirstate.remove(f)
1137 1108 finally:
1138 1109 release(wlock)
1139 1110
1140 1111 def undelete(self, list):
1141 1112 manifests = [self.manifest.read(self.changelog.read(p)[0])
1142 1113 for p in self.dirstate.parents() if p != nullid]
1143 1114 wlock = self.wlock()
1144 1115 try:
1145 1116 for f in list:
1146 1117 if self.dirstate[f] != 'r':
1147 1118 self.ui.warn(_("%s not removed!\n") % f)
1148 1119 else:
1149 1120 m = f in manifests[0] and manifests[0] or manifests[1]
1150 1121 t = self.file(f).read(m[f])
1151 1122 self.wwrite(f, t, m.flags(f))
1152 1123 self.dirstate.normal(f)
1153 1124 finally:
1154 1125 wlock.release()
1155 1126
1156 1127 def copy(self, source, dest):
1157 1128 p = self.wjoin(dest)
1158 1129 if not (os.path.exists(p) or os.path.islink(p)):
1159 1130 self.ui.warn(_("%s does not exist!\n") % dest)
1160 1131 elif not (os.path.isfile(p) or os.path.islink(p)):
1161 1132 self.ui.warn(_("copy failed: %s is not a file or a "
1162 1133 "symbolic link\n") % dest)
1163 1134 else:
1164 1135 wlock = self.wlock()
1165 1136 try:
1166 1137 if self.dirstate[dest] in '?r':
1167 1138 self.dirstate.add(dest)
1168 1139 self.dirstate.copy(source, dest)
1169 1140 finally:
1170 1141 wlock.release()
1171 1142
1172 1143 def heads(self, start=None, closed=True):
1173 1144 heads = self.changelog.heads(start)
1174 1145 def display(head):
1175 1146 if closed:
1176 1147 return True
1177 1148 extras = self.changelog.read(head)[5]
1178 1149 return ('close' not in extras)
1179 1150 # sort the output in rev descending order
1180 1151 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1181 1152 return [n for (r, n) in sorted(heads)]
1182 1153
1183 1154 def branchheads(self, branch=None, start=None, closed=True):
1184 1155 if branch is None:
1185 1156 branch = self[None].branch()
1186 1157 branches = self._branchheads()
1187 1158 if branch not in branches:
1188 1159 return []
1189 1160 bheads = branches[branch]
1190 1161 # the cache returns heads ordered lowest to highest
1191 1162 bheads.reverse()
1192 1163 if start is not None:
1193 1164 # filter out the heads that cannot be reached from startrev
1194 1165 bheads = self.changelog.nodesbetween([start], bheads)[2]
1195 1166 if not closed:
1196 1167 bheads = [h for h in bheads if
1197 1168 ('close' not in self.changelog.read(h)[5])]
1198 1169 return bheads
1199 1170
1200 1171 def branches(self, nodes):
1201 1172 if not nodes:
1202 1173 nodes = [self.changelog.tip()]
1203 1174 b = []
1204 1175 for n in nodes:
1205 1176 t = n
1206 1177 while 1:
1207 1178 p = self.changelog.parents(n)
1208 1179 if p[1] != nullid or p[0] == nullid:
1209 1180 b.append((t, n, p[0], p[1]))
1210 1181 break
1211 1182 n = p[0]
1212 1183 return b
1213 1184
1214 1185 def between(self, pairs):
1215 1186 r = []
1216 1187
1217 1188 for top, bottom in pairs:
1218 1189 n, l, i = top, [], 0
1219 1190 f = 1
1220 1191
1221 1192 while n != bottom and n != nullid:
1222 1193 p = self.changelog.parents(n)[0]
1223 1194 if i == f:
1224 1195 l.append(n)
1225 1196 f = f * 2
1226 1197 n = p
1227 1198 i += 1
1228 1199
1229 1200 r.append(l)
1230 1201
1231 1202 return r
1232 1203
1233 1204 def findincoming(self, remote, base=None, heads=None, force=False):
1234 1205 """Return list of roots of the subsets of missing nodes from remote
1235 1206
1236 1207 If base dict is specified, assume that these nodes and their parents
1237 1208 exist on the remote side and that no child of a node of base exists
1238 1209 in both remote and self.
1239 1210 Furthermore, base will be updated to include the nodes that exist
1240 1211 in both self and remote but have no children in self and remote.
1241 1212 If a list of heads is specified, return only nodes which are heads
1242 1213 or ancestors of these heads.
1243 1214
1244 1215 All the ancestors of base are in self and in remote.
1245 1216 All the descendants of the list returned are missing in self.
1246 1217 (and so we know that the rest of the nodes are missing in remote, see
1247 1218 outgoing)
1248 1219 """
1249 1220 return self.findcommonincoming(remote, base, heads, force)[1]
1250 1221
1251 1222 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1252 1223 """Return a tuple (common, missing roots, heads) used to identify
1253 1224 missing nodes from remote.
1254 1225
1255 1226 If base dict is specified, assume that these nodes and their parents
1256 1227 exist on the remote side and that no child of a node of base exists
1257 1228 in both remote and self.
1258 1229 Furthermore, base will be updated to include the nodes that exist
1259 1230 in both self and remote but have no children in self and remote.
1260 1231 If a list of heads is specified, return only nodes which are heads
1261 1232 or ancestors of these heads.
1262 1233
1263 1234 All the ancestors of base are in self and in remote.
1264 1235 """
1265 1236 m = self.changelog.nodemap
1266 1237 search = []
1267 1238 fetch = set()
1268 1239 seen = set()
1269 1240 seenbranch = set()
1270 1241 if base == None:
1271 1242 base = {}
1272 1243
1273 1244 if not heads:
1274 1245 heads = remote.heads()
1275 1246
1276 1247 if self.changelog.tip() == nullid:
1277 1248 base[nullid] = 1
1278 1249 if heads != [nullid]:
1279 1250 return [nullid], [nullid], list(heads)
1280 1251 return [nullid], [], []
1281 1252
1282 1253 # assume we're closer to the tip than the root
1283 1254 # and start by examining the heads
1284 1255 self.ui.status(_("searching for changes\n"))
1285 1256
1286 1257 unknown = []
1287 1258 for h in heads:
1288 1259 if h not in m:
1289 1260 unknown.append(h)
1290 1261 else:
1291 1262 base[h] = 1
1292 1263
1293 1264 heads = unknown
1294 1265 if not unknown:
1295 1266 return base.keys(), [], []
1296 1267
1297 1268 req = set(unknown)
1298 1269 reqcnt = 0
1299 1270
1300 1271 # search through remote branches
1301 1272 # a 'branch' here is a linear segment of history, with four parts:
1302 1273 # head, root, first parent, second parent
1303 1274 # (a branch always has two parents (or none) by definition)
1304 1275 unknown = remote.branches(unknown)
1305 1276 while unknown:
1306 1277 r = []
1307 1278 while unknown:
1308 1279 n = unknown.pop(0)
1309 1280 if n[0] in seen:
1310 1281 continue
1311 1282
1312 1283 self.ui.debug(_("examining %s:%s\n")
1313 1284 % (short(n[0]), short(n[1])))
1314 1285 if n[0] == nullid: # found the end of the branch
1315 1286 pass
1316 1287 elif n in seenbranch:
1317 1288 self.ui.debug(_("branch already found\n"))
1318 1289 continue
1319 1290 elif n[1] and n[1] in m: # do we know the base?
1320 1291 self.ui.debug(_("found incomplete branch %s:%s\n")
1321 1292 % (short(n[0]), short(n[1])))
1322 1293 search.append(n[0:2]) # schedule branch range for scanning
1323 1294 seenbranch.add(n)
1324 1295 else:
1325 1296 if n[1] not in seen and n[1] not in fetch:
1326 1297 if n[2] in m and n[3] in m:
1327 1298 self.ui.debug(_("found new changeset %s\n") %
1328 1299 short(n[1]))
1329 1300 fetch.add(n[1]) # earliest unknown
1330 1301 for p in n[2:4]:
1331 1302 if p in m:
1332 1303 base[p] = 1 # latest known
1333 1304
1334 1305 for p in n[2:4]:
1335 1306 if p not in req and p not in m:
1336 1307 r.append(p)
1337 1308 req.add(p)
1338 1309 seen.add(n[0])
1339 1310
1340 1311 if r:
1341 1312 reqcnt += 1
1342 1313 self.ui.debug(_("request %d: %s\n") %
1343 1314 (reqcnt, " ".join(map(short, r))))
1344 1315 for p in xrange(0, len(r), 10):
1345 1316 for b in remote.branches(r[p:p+10]):
1346 1317 self.ui.debug(_("received %s:%s\n") %
1347 1318 (short(b[0]), short(b[1])))
1348 1319 unknown.append(b)
1349 1320
1350 1321 # do binary search on the branches we found
1351 1322 while search:
1352 1323 newsearch = []
1353 1324 reqcnt += 1
1354 1325 for n, l in zip(search, remote.between(search)):
1355 1326 l.append(n[1])
1356 1327 p = n[0]
1357 1328 f = 1
1358 1329 for i in l:
1359 1330 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1360 1331 if i in m:
1361 1332 if f <= 2:
1362 1333 self.ui.debug(_("found new branch changeset %s\n") %
1363 1334 short(p))
1364 1335 fetch.add(p)
1365 1336 base[i] = 1
1366 1337 else:
1367 1338 self.ui.debug(_("narrowed branch search to %s:%s\n")
1368 1339 % (short(p), short(i)))
1369 1340 newsearch.append((p, i))
1370 1341 break
1371 1342 p, f = i, f * 2
1372 1343 search = newsearch
1373 1344
1374 1345 # sanity check our fetch list
1375 1346 for f in fetch:
1376 1347 if f in m:
1377 1348 raise error.RepoError(_("already have changeset ")
1378 1349 + short(f[:4]))
1379 1350
1380 1351 if base.keys() == [nullid]:
1381 1352 if force:
1382 1353 self.ui.warn(_("warning: repository is unrelated\n"))
1383 1354 else:
1384 1355 raise util.Abort(_("repository is unrelated"))
1385 1356
1386 1357 self.ui.debug(_("found new changesets starting at ") +
1387 1358 " ".join([short(f) for f in fetch]) + "\n")
1388 1359
1389 1360 self.ui.debug(_("%d total queries\n") % reqcnt)
1390 1361
1391 1362 return base.keys(), list(fetch), heads
1392 1363
1393 1364 def findoutgoing(self, remote, base=None, heads=None, force=False):
1394 1365 """Return list of nodes that are roots of subsets not in remote
1395 1366
1396 1367 If base dict is specified, assume that these nodes and their parents
1397 1368 exist on the remote side.
1398 1369 If a list of heads is specified, return only nodes which are heads
1399 1370 or ancestors of these heads, and return a second element which
1400 1371 contains all remote heads which get new children.
1401 1372 """
1402 1373 if base == None:
1403 1374 base = {}
1404 1375 self.findincoming(remote, base, heads, force=force)
1405 1376
1406 1377 self.ui.debug(_("common changesets up to ")
1407 1378 + " ".join(map(short, base.keys())) + "\n")
1408 1379
1409 1380 remain = set(self.changelog.nodemap)
1410 1381
1411 1382 # prune everything remote has from the tree
1412 1383 remain.remove(nullid)
1413 1384 remove = base.keys()
1414 1385 while remove:
1415 1386 n = remove.pop(0)
1416 1387 if n in remain:
1417 1388 remain.remove(n)
1418 1389 for p in self.changelog.parents(n):
1419 1390 remove.append(p)
1420 1391
1421 1392 # find every node whose parents have been pruned
1422 1393 subset = []
1423 1394 # find every remote head that will get new children
1424 1395 updated_heads = {}
1425 1396 for n in remain:
1426 1397 p1, p2 = self.changelog.parents(n)
1427 1398 if p1 not in remain and p2 not in remain:
1428 1399 subset.append(n)
1429 1400 if heads:
1430 1401 if p1 in heads:
1431 1402 updated_heads[p1] = True
1432 1403 if p2 in heads:
1433 1404 updated_heads[p2] = True
1434 1405
1435 1406 # this is the set of all roots we have to push
1436 1407 if heads:
1437 1408 return subset, updated_heads.keys()
1438 1409 else:
1439 1410 return subset
1440 1411
1441 1412 def pull(self, remote, heads=None, force=False):
1442 1413 lock = self.lock()
1443 1414 try:
1444 1415 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1445 1416 force=force)
1446 1417 if fetch == [nullid]:
1447 1418 self.ui.status(_("requesting all changes\n"))
1448 1419
1449 1420 if not fetch:
1450 1421 self.ui.status(_("no changes found\n"))
1451 1422 return 0
1452 1423
1453 1424 if heads is None and remote.capable('changegroupsubset'):
1454 1425 heads = rheads
1455 1426
1456 1427 if heads is None:
1457 1428 cg = remote.changegroup(fetch, 'pull')
1458 1429 else:
1459 1430 if not remote.capable('changegroupsubset'):
1460 1431 raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1461 1432 cg = remote.changegroupsubset(fetch, heads, 'pull')
1462 1433 return self.addchangegroup(cg, 'pull', remote.url())
1463 1434 finally:
1464 1435 lock.release()
1465 1436
1466 1437 def push(self, remote, force=False, revs=None):
1467 1438 # there are two ways to push to remote repo:
1468 1439 #
1469 1440 # addchangegroup assumes local user can lock remote
1470 1441 # repo (local filesystem, old ssh servers).
1471 1442 #
1472 1443 # unbundle assumes local user cannot lock remote repo (new ssh
1473 1444 # servers, http servers).
1474 1445
1475 1446 if remote.capable('unbundle'):
1476 1447 return self.push_unbundle(remote, force, revs)
1477 1448 return self.push_addchangegroup(remote, force, revs)
1478 1449
1479 1450 def prepush(self, remote, force, revs):
1480 1451 common = {}
1481 1452 remote_heads = remote.heads()
1482 1453 inc = self.findincoming(remote, common, remote_heads, force=force)
1483 1454
1484 1455 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1485 1456 if revs is not None:
1486 1457 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1487 1458 else:
1488 1459 bases, heads = update, self.changelog.heads()
1489 1460
1490 1461 if not bases:
1491 1462 self.ui.status(_("no changes found\n"))
1492 1463 return None, 1
1493 1464 elif not force:
1494 1465 # check if we're creating new remote heads
1495 1466 # to be a remote head after push, node must be either
1496 1467 # - unknown locally
1497 1468 # - a local outgoing head descended from update
1498 1469 # - a remote head that's known locally and not
1499 1470 # ancestral to an outgoing head
1500 1471
1501 1472 warn = 0
1502 1473
1503 1474 if remote_heads == [nullid]:
1504 1475 warn = 0
1505 1476 elif not revs and len(heads) > len(remote_heads):
1506 1477 warn = 1
1507 1478 else:
1508 1479 newheads = list(heads)
1509 1480 for r in remote_heads:
1510 1481 if r in self.changelog.nodemap:
1511 1482 desc = self.changelog.heads(r, heads)
1512 1483 l = [h for h in heads if h in desc]
1513 1484 if not l:
1514 1485 newheads.append(r)
1515 1486 else:
1516 1487 newheads.append(r)
1517 1488 if len(newheads) > len(remote_heads):
1518 1489 warn = 1
1519 1490
1520 1491 if warn:
1521 1492 self.ui.warn(_("abort: push creates new remote heads!\n"))
1522 1493 self.ui.status(_("(did you forget to merge?"
1523 1494 " use push -f to force)\n"))
1524 1495 return None, 0
1525 1496 elif inc:
1526 1497 self.ui.warn(_("note: unsynced remote changes!\n"))
1527 1498
1528 1499
1529 1500 if revs is None:
1530 1501 # use the fast path, no race possible on push
1531 1502 cg = self._changegroup(common.keys(), 'push')
1532 1503 else:
1533 1504 cg = self.changegroupsubset(update, revs, 'push')
1534 1505 return cg, remote_heads
1535 1506
1536 1507 def push_addchangegroup(self, remote, force, revs):
1537 1508 lock = remote.lock()
1538 1509 try:
1539 1510 ret = self.prepush(remote, force, revs)
1540 1511 if ret[0] is not None:
1541 1512 cg, remote_heads = ret
1542 1513 return remote.addchangegroup(cg, 'push', self.url())
1543 1514 return ret[1]
1544 1515 finally:
1545 1516 lock.release()
1546 1517
1547 1518 def push_unbundle(self, remote, force, revs):
1548 1519 # local repo finds heads on server, finds out what revs it
1549 1520 # must push. once revs transferred, if server finds it has
1550 1521 # different heads (someone else won commit/push race), server
1551 1522 # aborts.
1552 1523
1553 1524 ret = self.prepush(remote, force, revs)
1554 1525 if ret[0] is not None:
1555 1526 cg, remote_heads = ret
1556 1527 if force: remote_heads = ['force']
1557 1528 return remote.unbundle(cg, remote_heads, 'push')
1558 1529 return ret[1]
1559 1530
1560 1531 def changegroupinfo(self, nodes, source):
1561 1532 if self.ui.verbose or source == 'bundle':
1562 1533 self.ui.status(_("%d changesets found\n") % len(nodes))
1563 1534 if self.ui.debugflag:
1564 1535 self.ui.debug(_("list of changesets:\n"))
1565 1536 for node in nodes:
1566 1537 self.ui.debug("%s\n" % hex(node))
1567 1538
1568 1539 def changegroupsubset(self, bases, heads, source, extranodes=None):
1569 1540 """This function generates a changegroup consisting of all the nodes
1570 1541 that are descendents of any of the bases, and ancestors of any of
1571 1542 the heads.
1572 1543
1573 1544 It is fairly complex as determining which filenodes and which
1574 1545 manifest nodes need to be included for the changeset to be complete
1575 1546 is non-trivial.
1576 1547
1577 1548 Another wrinkle is doing the reverse, figuring out which changeset in
1578 1549 the changegroup a particular filenode or manifestnode belongs to.
1579 1550
1580 1551 The caller can specify some nodes that must be included in the
1581 1552 changegroup using the extranodes argument. It should be a dict
1582 1553 where the keys are the filenames (or 1 for the manifest), and the
1583 1554 values are lists of (node, linknode) tuples, where node is a wanted
1584 1555 node and linknode is the changelog node that should be transmitted as
1585 1556 the linkrev.
1586 1557 """
1587 1558
1588 1559 if extranodes is None:
1589 1560 # can we go through the fast path ?
1590 1561 heads.sort()
1591 1562 allheads = self.heads()
1592 1563 allheads.sort()
1593 1564 if heads == allheads:
1594 1565 common = []
1595 1566 # parents of bases are known from both sides
1596 1567 for n in bases:
1597 1568 for p in self.changelog.parents(n):
1598 1569 if p != nullid:
1599 1570 common.append(p)
1600 1571 return self._changegroup(common, source)
1601 1572
1602 1573 self.hook('preoutgoing', throw=True, source=source)
1603 1574
1604 1575 # Set up some initial variables
1605 1576 # Make it easy to refer to self.changelog
1606 1577 cl = self.changelog
1607 1578 # msng is short for missing - compute the list of changesets in this
1608 1579 # changegroup.
1609 1580 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1610 1581 self.changegroupinfo(msng_cl_lst, source)
1611 1582 # Some bases may turn out to be superfluous, and some heads may be
1612 1583 # too. nodesbetween will return the minimal set of bases and heads
1613 1584 # necessary to re-create the changegroup.
1614 1585
1615 1586 # Known heads are the list of heads that it is assumed the recipient
1616 1587 # of this changegroup will know about.
1617 1588 knownheads = {}
1618 1589 # We assume that all parents of bases are known heads.
1619 1590 for n in bases:
1620 1591 for p in cl.parents(n):
1621 1592 if p != nullid:
1622 1593 knownheads[p] = 1
1623 1594 knownheads = knownheads.keys()
1624 1595 if knownheads:
1625 1596 # Now that we know what heads are known, we can compute which
1626 1597 # changesets are known. The recipient must know about all
1627 1598 # changesets required to reach the known heads from the null
1628 1599 # changeset.
1629 1600 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1630 1601 junk = None
1631 1602 # Transform the list into a set.
1632 1603 has_cl_set = set(has_cl_set)
1633 1604 else:
1634 1605 # If there were no known heads, the recipient cannot be assumed to
1635 1606 # know about any changesets.
1636 1607 has_cl_set = set()
1637 1608
1638 1609 # Make it easy to refer to self.manifest
1639 1610 mnfst = self.manifest
1640 1611 # We don't know which manifests are missing yet
1641 1612 msng_mnfst_set = {}
1642 1613 # Nor do we know which filenodes are missing.
1643 1614 msng_filenode_set = {}
1644 1615
1645 1616 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1646 1617 junk = None
1647 1618
1648 1619 # A changeset always belongs to itself, so the changenode lookup
1649 1620 # function for a changenode is identity.
1650 1621 def identity(x):
1651 1622 return x
1652 1623
1653 1624 # A function generating function. Sets up an environment for the
1654 1625 # inner function.
1655 1626 def cmp_by_rev_func(revlog):
1656 1627 # Compare two nodes by their revision number in the environment's
1657 1628 # revision history. Since the revision number both represents the
1658 1629 # most efficient order to read the nodes in, and represents a
1659 1630 # topological sorting of the nodes, this function is often useful.
1660 1631 def cmp_by_rev(a, b):
1661 1632 return cmp(revlog.rev(a), revlog.rev(b))
1662 1633 return cmp_by_rev
1663 1634
1664 1635 # If we determine that a particular file or manifest node must be a
1665 1636 # node that the recipient of the changegroup will already have, we can
1666 1637 # also assume the recipient will have all the parents. This function
1667 1638 # prunes them from the set of missing nodes.
1668 1639 def prune_parents(revlog, hasset, msngset):
1669 1640 haslst = hasset.keys()
1670 1641 haslst.sort(cmp_by_rev_func(revlog))
1671 1642 for node in haslst:
1672 1643 parentlst = [p for p in revlog.parents(node) if p != nullid]
1673 1644 while parentlst:
1674 1645 n = parentlst.pop()
1675 1646 if n not in hasset:
1676 1647 hasset[n] = 1
1677 1648 p = [p for p in revlog.parents(n) if p != nullid]
1678 1649 parentlst.extend(p)
1679 1650 for n in hasset:
1680 1651 msngset.pop(n, None)
1681 1652
1682 1653 # This is a function generating function used to set up an environment
1683 1654 # for the inner function to execute in.
1684 1655 def manifest_and_file_collector(changedfileset):
1685 1656 # This is an information gathering function that gathers
1686 1657 # information from each changeset node that goes out as part of
1687 1658 # the changegroup. The information gathered is a list of which
1688 1659 # manifest nodes are potentially required (the recipient may
1689 1660 # already have them) and total list of all files which were
1690 1661 # changed in any changeset in the changegroup.
1691 1662 #
1692 1663 # We also remember the first changenode each manifest was
1693 1664 # referenced by, so we can later determine which changenode 'owns'
1694 1665 # the manifest.
1695 1666 def collect_manifests_and_files(clnode):
1696 1667 c = cl.read(clnode)
1697 1668 for f in c[3]:
1698 1669 # This is to make sure we only have one instance of each
1699 1670 # filename string for each filename.
1700 1671 changedfileset.setdefault(f, f)
1701 1672 msng_mnfst_set.setdefault(c[0], clnode)
1702 1673 return collect_manifests_and_files
1703 1674
1704 1675 # Figure out which manifest nodes (of the ones we think might be part
1705 1676 # of the changegroup) the recipient must know about and remove them
1706 1677 # from the changegroup.
1707 1678 def prune_manifests():
1708 1679 has_mnfst_set = {}
1709 1680 for n in msng_mnfst_set:
1710 1681 # If a 'missing' manifest thinks it belongs to a changenode
1711 1682 # the recipient is assumed to have, obviously the recipient
1712 1683 # must have that manifest.
1713 1684 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1714 1685 if linknode in has_cl_set:
1715 1686 has_mnfst_set[n] = 1
1716 1687 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1717 1688
1718 1689 # Use the information collected in collect_manifests_and_files to say
1719 1690 # which changenode any manifestnode belongs to.
1720 1691 def lookup_manifest_link(mnfstnode):
1721 1692 return msng_mnfst_set[mnfstnode]
1722 1693
1723 1694 # A function generating function that sets up the initial environment
1724 1695 # for the inner function.
1725 1696 def filenode_collector(changedfiles):
1726 1697 next_rev = [0]
1727 1698 # This gathers information from each manifestnode included in the
1728 1699 # changegroup about which filenodes the manifest node references
1729 1700 # so we can include those in the changegroup too.
1730 1701 #
1731 1702 # It also remembers which changenode each filenode belongs to. It
1732 1703 # does this by assuming that a filenode belongs to the same changenode
1733 1704 # as the first manifest that references it.
1734 1705 def collect_msng_filenodes(mnfstnode):
1735 1706 r = mnfst.rev(mnfstnode)
1736 1707 if r == next_rev[0]:
1737 1708 # If the last rev we looked at was the one just previous,
1738 1709 # we only need to see a diff.
1739 1710 deltamf = mnfst.readdelta(mnfstnode)
1740 1711 # For each line in the delta
1741 1712 for f, fnode in deltamf.iteritems():
1742 1713 f = changedfiles.get(f, None)
1743 1714 # And if the file is in the list of files we care
1744 1715 # about.
1745 1716 if f is not None:
1746 1717 # Get the changenode this manifest belongs to
1747 1718 clnode = msng_mnfst_set[mnfstnode]
1748 1719 # Create the set of filenodes for the file if
1749 1720 # there isn't one already.
1750 1721 ndset = msng_filenode_set.setdefault(f, {})
1751 1722 # And set the filenode's changelog node to the
1752 1723 # manifest's if it hasn't been set already.
1753 1724 ndset.setdefault(fnode, clnode)
1754 1725 else:
1755 1726 # Otherwise we need a full manifest.
1756 1727 m = mnfst.read(mnfstnode)
1757 1728 # For every file we care about.
1758 1729 for f in changedfiles:
1759 1730 fnode = m.get(f, None)
1760 1731 # If it's in the manifest
1761 1732 if fnode is not None:
1762 1733 # See comments above.
1763 1734 clnode = msng_mnfst_set[mnfstnode]
1764 1735 ndset = msng_filenode_set.setdefault(f, {})
1765 1736 ndset.setdefault(fnode, clnode)
1766 1737 # Remember the revision we hope to see next.
1767 1738 next_rev[0] = r + 1
1768 1739 return collect_msng_filenodes
1769 1740
1770 1741 # We have a list of filenodes we think we need for a file, lets remove
1771 1742 # all those we know the recipient must have.
1772 1743 def prune_filenodes(f, filerevlog):
1773 1744 msngset = msng_filenode_set[f]
1774 1745 hasset = {}
1775 1746 # If a 'missing' filenode thinks it belongs to a changenode we
1776 1747 # assume the recipient must have, then the recipient must have
1777 1748 # that filenode.
1778 1749 for n in msngset:
1779 1750 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1780 1751 if clnode in has_cl_set:
1781 1752 hasset[n] = 1
1782 1753 prune_parents(filerevlog, hasset, msngset)
1783 1754
1784 1755 # A function generating function that sets up a context for the
1785 1756 # inner function.
1786 1757 def lookup_filenode_link_func(fname):
1787 1758 msngset = msng_filenode_set[fname]
1788 1759 # Lookup the changenode the filenode belongs to.
1789 1760 def lookup_filenode_link(fnode):
1790 1761 return msngset[fnode]
1791 1762 return lookup_filenode_link
1792 1763
1793 1764 # Add the nodes that were explicitly requested.
1794 1765 def add_extra_nodes(name, nodes):
1795 1766 if not extranodes or name not in extranodes:
1796 1767 return
1797 1768
1798 1769 for node, linknode in extranodes[name]:
1799 1770 if node not in nodes:
1800 1771 nodes[node] = linknode
1801 1772
1802 1773 # Now that we have all these utility functions to help out and
1803 1774 # logically divide up the task, generate the group.
1804 1775 def gengroup():
1805 1776 # The set of changed files starts empty.
1806 1777 changedfiles = {}
1807 1778 # Create a changenode group generator that will call our functions
1808 1779 # back to lookup the owning changenode and collect information.
1809 1780 group = cl.group(msng_cl_lst, identity,
1810 1781 manifest_and_file_collector(changedfiles))
1811 1782 for chnk in group:
1812 1783 yield chnk
1813 1784
1814 1785 # The list of manifests has been collected by the generator
1815 1786 # calling our functions back.
1816 1787 prune_manifests()
1817 1788 add_extra_nodes(1, msng_mnfst_set)
1818 1789 msng_mnfst_lst = msng_mnfst_set.keys()
1819 1790 # Sort the manifestnodes by revision number.
1820 1791 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1821 1792 # Create a generator for the manifestnodes that calls our lookup
1822 1793 # and data collection functions back.
1823 1794 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1824 1795 filenode_collector(changedfiles))
1825 1796 for chnk in group:
1826 1797 yield chnk
1827 1798
1828 1799 # These are no longer needed, dereference and toss the memory for
1829 1800 # them.
1830 1801 msng_mnfst_lst = None
1831 1802 msng_mnfst_set.clear()
1832 1803
1833 1804 if extranodes:
1834 1805 for fname in extranodes:
1835 1806 if isinstance(fname, int):
1836 1807 continue
1837 1808 msng_filenode_set.setdefault(fname, {})
1838 1809 changedfiles[fname] = 1
1839 1810 # Go through all our files in order sorted by name.
1840 1811 for fname in sorted(changedfiles):
1841 1812 filerevlog = self.file(fname)
1842 1813 if not len(filerevlog):
1843 1814 raise util.Abort(_("empty or missing revlog for %s") % fname)
1844 1815 # Toss out the filenodes that the recipient isn't really
1845 1816 # missing.
1846 1817 if fname in msng_filenode_set:
1847 1818 prune_filenodes(fname, filerevlog)
1848 1819 add_extra_nodes(fname, msng_filenode_set[fname])
1849 1820 msng_filenode_lst = msng_filenode_set[fname].keys()
1850 1821 else:
1851 1822 msng_filenode_lst = []
1852 1823 # If any filenodes are left, generate the group for them,
1853 1824 # otherwise don't bother.
1854 1825 if len(msng_filenode_lst) > 0:
1855 1826 yield changegroup.chunkheader(len(fname))
1856 1827 yield fname
1857 1828 # Sort the filenodes by their revision #
1858 1829 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1859 1830 # Create a group generator and only pass in a changenode
1860 1831 # lookup function as we need to collect no information
1861 1832 # from filenodes.
1862 1833 group = filerevlog.group(msng_filenode_lst,
1863 1834 lookup_filenode_link_func(fname))
1864 1835 for chnk in group:
1865 1836 yield chnk
1866 1837 if fname in msng_filenode_set:
1867 1838 # Don't need this anymore, toss it to free memory.
1868 1839 del msng_filenode_set[fname]
1869 1840 # Signal that no more groups are left.
1870 1841 yield changegroup.closechunk()
1871 1842
1872 1843 if msng_cl_lst:
1873 1844 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1874 1845
1875 1846 return util.chunkbuffer(gengroup())
1876 1847
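The extranodes argument described in the docstring is a plain dict; a hypothetical sketch of its shape (the node values would be real binary nodeids, and the names here are placeholders):

    extranodes = {
        1: [(manifest_node, changelog_node)],          # the key 1 stands for the manifest
        "foo.txt": [(foo_filenode, changelog_node)],   # per-file (node, linknode) pairs to force in
    }
    cg = repo.changegroupsubset(bases, heads, 'push', extranodes=extranodes)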
1877 1848 def changegroup(self, basenodes, source):
1878 1849 # to avoid a race we use changegroupsubset() (issue1320)
1879 1850 return self.changegroupsubset(basenodes, self.heads(), source)
1880 1851
1881 1852 def _changegroup(self, common, source):
1882 1853 """Generate a changegroup of all nodes that we have that a recipient
1883 1854 doesn't.
1884 1855
1885 1856 This is much easier than the previous function as we can assume that
1886 1857 the recipient has any changenode we aren't sending them.
1887 1858
1888 1859 common is the set of common nodes between remote and self"""
1889 1860
1890 1861 self.hook('preoutgoing', throw=True, source=source)
1891 1862
1892 1863 cl = self.changelog
1893 1864 nodes = cl.findmissing(common)
1894 1865 revset = set([cl.rev(n) for n in nodes])
1895 1866 self.changegroupinfo(nodes, source)
1896 1867
1897 1868 def identity(x):
1898 1869 return x
1899 1870
1900 1871 def gennodelst(log):
1901 1872 for r in log:
1902 1873 if log.linkrev(r) in revset:
1903 1874 yield log.node(r)
1904 1875
1905 1876 def changed_file_collector(changedfileset):
1906 1877 def collect_changed_files(clnode):
1907 1878 c = cl.read(clnode)
1908 1879 for fname in c[3]:
1909 1880 changedfileset[fname] = 1
1910 1881 return collect_changed_files
1911 1882
1912 1883 def lookuprevlink_func(revlog):
1913 1884 def lookuprevlink(n):
1914 1885 return cl.node(revlog.linkrev(revlog.rev(n)))
1915 1886 return lookuprevlink
1916 1887
1917 1888 def gengroup():
1918 1889 # construct a list of all changed files
1919 1890 changedfiles = {}
1920 1891
1921 1892 for chnk in cl.group(nodes, identity,
1922 1893 changed_file_collector(changedfiles)):
1923 1894 yield chnk
1924 1895
1925 1896 mnfst = self.manifest
1926 1897 nodeiter = gennodelst(mnfst)
1927 1898 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1928 1899 yield chnk
1929 1900
1930 1901 for fname in sorted(changedfiles):
1931 1902 filerevlog = self.file(fname)
1932 1903 if not len(filerevlog):
1933 1904 raise util.Abort(_("empty or missing revlog for %s") % fname)
1934 1905 nodeiter = gennodelst(filerevlog)
1935 1906 nodeiter = list(nodeiter)
1936 1907 if nodeiter:
1937 1908 yield changegroup.chunkheader(len(fname))
1938 1909 yield fname
1939 1910 lookup = lookuprevlink_func(filerevlog)
1940 1911 for chnk in filerevlog.group(nodeiter, lookup):
1941 1912 yield chnk
1942 1913
1943 1914 yield changegroup.closechunk()
1944 1915
1945 1916 if nodes:
1946 1917 self.hook('outgoing', node=hex(nodes[0]), source=source)
1947 1918
1948 1919 return util.chunkbuffer(gengroup())
1949 1920
1950 1921 def addchangegroup(self, source, srctype, url, emptyok=False):
1951 1922 """add changegroup to repo.
1952 1923
1953 1924 return values:
1954 1925 - nothing changed or no source: 0
1955 1926 - more heads than before: 1+added heads (2..n)
1956 1927 - fewer heads than before: -1-removed heads (-2..-n)
1957 1928 - number of heads stays the same: 1
1958 1929 """
1959 1930 def csmap(x):
1960 1931 self.ui.debug(_("add changeset %s\n") % short(x))
1961 1932 return len(cl)
1962 1933
1963 1934 def revmap(x):
1964 1935 return cl.rev(x)
1965 1936
1966 1937 if not source:
1967 1938 return 0
1968 1939
1969 1940 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1970 1941
1971 1942 changesets = files = revisions = 0
1972 1943
1973 1944 # write changelog data to temp files so concurrent readers will not see
1974 1945 # inconsistent view
1975 1946 cl = self.changelog
1976 1947 cl.delayupdate()
1977 1948 oldheads = len(cl.heads())
1978 1949
1979 1950 tr = self.transaction()
1980 1951 try:
1981 1952 trp = weakref.proxy(tr)
1982 1953 # pull off the changeset group
1983 1954 self.ui.status(_("adding changesets\n"))
1984 1955 clstart = len(cl)
1985 1956 chunkiter = changegroup.chunkiter(source)
1986 1957 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1987 1958 raise util.Abort(_("received changelog group is empty"))
1988 1959 clend = len(cl)
1989 1960 changesets = clend - clstart
1990 1961
1991 1962 # pull off the manifest group
1992 1963 self.ui.status(_("adding manifests\n"))
1993 1964 chunkiter = changegroup.chunkiter(source)
1994 1965 # no need to check for empty manifest group here:
1995 1966 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1996 1967 # no new manifest will be created and the manifest group will
1997 1968 # be empty during the pull
1998 1969 self.manifest.addgroup(chunkiter, revmap, trp)
1999 1970
2000 1971 # process the files
2001 1972 self.ui.status(_("adding file changes\n"))
2002 1973 while 1:
2003 1974 f = changegroup.getchunk(source)
2004 1975 if not f:
2005 1976 break
2006 1977 self.ui.debug(_("adding %s revisions\n") % f)
2007 1978 fl = self.file(f)
2008 1979 o = len(fl)
2009 1980 chunkiter = changegroup.chunkiter(source)
2010 1981 if fl.addgroup(chunkiter, revmap, trp) is None:
2011 1982 raise util.Abort(_("received file revlog group is empty"))
2012 1983 revisions += len(fl) - o
2013 1984 files += 1
2014 1985
2015 1986 newheads = len(cl.heads())
2016 1987 heads = ""
2017 1988 if oldheads and newheads != oldheads:
2018 1989 heads = _(" (%+d heads)") % (newheads - oldheads)
2019 1990
2020 1991 self.ui.status(_("added %d changesets"
2021 1992 " with %d changes to %d files%s\n")
2022 1993 % (changesets, revisions, files, heads))
2023 1994
2024 1995 if changesets > 0:
2025 1996 p = lambda: cl.writepending() and self.root or ""
2026 1997 self.hook('pretxnchangegroup', throw=True,
2027 1998 node=hex(cl.node(clstart)), source=srctype,
2028 1999 url=url, pending=p)
2029 2000
2030 2001 # make changelog see real files again
2031 2002 cl.finalize(trp)
2032 2003
2033 2004 tr.close()
2034 2005 finally:
2035 2006 del tr
2036 2007
2037 2008 if changesets > 0:
2038 2009 # forcefully update the on-disk branch cache
2039 2010 self.ui.debug(_("updating the branch cache\n"))
2040 2011 self.branchtags()
2041 2012 self.hook("changegroup", node=hex(cl.node(clstart)),
2042 2013 source=srctype, url=url)
2043 2014
2044 2015 for i in xrange(clstart, clend):
2045 2016 self.hook("incoming", node=hex(cl.node(i)),
2046 2017 source=srctype, url=url)
2047 2018
2048 2019 # never return 0 here:
2049 2020 if newheads < oldheads:
2050 2021 return newheads - oldheads - 1
2051 2022 else:
2052 2023 return newheads - oldheads + 1
2053 2024
2054 2025
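The return value encodes how the head count changed, as listed in the docstring, and pull() above returns it unchanged. A small sketch of decoding it on the caller side:

    modheads = repo.pull(remote)     # pull() passes addchangegroup()'s result through
    if modheads == 0:
        print "nothing changed"
    elif modheads == 1:
        print "changesets added, head count unchanged"
    elif modheads > 1:
        print "%d new heads (a merge may be needed)" % (modheads - 1)
    else:
        print "%d heads removed" % (-modheads - 1)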
2055 2026 def stream_in(self, remote):
2056 2027 fp = remote.stream_out()
2057 2028 l = fp.readline()
2058 2029 try:
2059 2030 resp = int(l)
2060 2031 except ValueError:
2061 2032 raise error.ResponseError(
2062 2033 _('Unexpected response from remote server:'), l)
2063 2034 if resp == 1:
2064 2035 raise util.Abort(_('operation forbidden by server'))
2065 2036 elif resp == 2:
2066 2037 raise util.Abort(_('locking the remote repository failed'))
2067 2038 elif resp != 0:
2068 2039 raise util.Abort(_('the server sent an unknown error code'))
2069 2040 self.ui.status(_('streaming all changes\n'))
2070 2041 l = fp.readline()
2071 2042 try:
2072 2043 total_files, total_bytes = map(int, l.split(' ', 1))
2073 2044 except (ValueError, TypeError):
2074 2045 raise error.ResponseError(
2075 2046 _('Unexpected response from remote server:'), l)
2076 2047 self.ui.status(_('%d files to transfer, %s of data\n') %
2077 2048 (total_files, util.bytecount(total_bytes)))
2078 2049 start = time.time()
2079 2050 for i in xrange(total_files):
2080 2051 # XXX doesn't support '\n' or '\r' in filenames
2081 2052 l = fp.readline()
2082 2053 try:
2083 2054 name, size = l.split('\0', 1)
2084 2055 size = int(size)
2085 2056 except (ValueError, TypeError):
2086 2057 raise error.ResponseError(
2087 2058 _('Unexpected response from remote server:'), l)
2088 2059 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2089 2060 ofp = self.sopener(name, 'w')
2090 2061 for chunk in util.filechunkiter(fp, limit=size):
2091 2062 ofp.write(chunk)
2092 2063 ofp.close()
2093 2064 elapsed = time.time() - start
2094 2065 if elapsed <= 0:
2095 2066 elapsed = 0.001
2096 2067 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2097 2068 (util.bytecount(total_bytes), elapsed,
2098 2069 util.bytecount(total_bytes / elapsed)))
2099 2070 self.invalidate()
2100 2071 return len(self.heads()) + 1
2101 2072
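The parsing in stream_in() implies a simple line-oriented layout for the streaming-clone data; an annotated sketch of what the server sends (the sizes and store filenames are illustrative):

    0                      <- response code: 0 ok, 1 forbidden, 2 remote locking failed
    2 8192                 <- total_files and total_bytes, space separated
    data/foo.txt.i\04096   <- "<store filename>\0<size>", then exactly <size> bytes of revlog data
    <4096 bytes of raw revlog data>
    00changelog.i\04096
    <4096 bytes of raw revlog data>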
2102 2073 def clone(self, remote, heads=[], stream=False):
2103 2074 '''clone remote repository.
2104 2075
2105 2076 keyword arguments:
2106 2077 heads: list of revs to clone (forces use of pull)
2107 2078 stream: use streaming clone if possible'''
2108 2079
2109 2080 # now, all clients that can request uncompressed clones can
2110 2081 # read repo formats supported by all servers that can serve
2111 2082 # them.
2112 2083
2113 2084 # if revlog format changes, client will have to check version
2114 2085 # and format flags on "stream" capability, and use
2115 2086 # uncompressed only if compatible.
2116 2087
2117 2088 if stream and not heads and remote.capable('stream'):
2118 2089 return self.stream_in(remote)
2119 2090 return self.pull(remote, heads)
2120 2091
2121 2092 # used to avoid circular references so destructors work
2122 2093 def aftertrans(files):
2123 2094 renamefiles = [tuple(t) for t in files]
2124 2095 def a():
2125 2096 for src, dest in renamefiles:
2126 2097 util.rename(src, dest)
2127 2098 return a
2128 2099
2129 2100 def instance(ui, path, create):
2130 2101 return localrepository(ui, util.drop_scheme('file', path), create)
2131 2102
2132 2103 def islocal(path):
2133 2104 return True