commit: move commit editor to cmdutil, pass as function
Matt Mackall
r8407:223000a6 default
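This changeset replaces the force_editor boolean that callers previously threaded through repo.commit() with an editor callable selected via cmdutil: cmdutil.commiteditor by default, or cmdutil.commitforceeditor when the user explicitly asks for an editor. The new calling pattern, excerpted from the fetch.py hunk below:

    editor = cmdutil.commiteditor
    if opts.get('force_editor') or opts.get('edit'):
        editor = cmdutil.commitforceeditor
    n = repo.commit(mod + add + rem, message, opts['user'],
                    opts['date'], force=True, editor=editor)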
@@ -1,147 +1,148 @@
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''pulling, updating and merging in one command'''
8 '''pulling, updating and merging in one command'''
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial.node import nullid, short
11 from mercurial.node import nullid, short
12 from mercurial import commands, cmdutil, hg, util, url
12 from mercurial import commands, cmdutil, hg, util, url
13 from mercurial.lock import release
13 from mercurial.lock import release
14
14
15 def fetch(ui, repo, source='default', **opts):
15 def fetch(ui, repo, source='default', **opts):
16 '''pull changes from a remote repository, merge new changes if needed.
16 '''pull changes from a remote repository, merge new changes if needed.
17
17
18 This finds all changes from the repository at the specified path
18 This finds all changes from the repository at the specified path
19 or URL and adds them to the local repository.
19 or URL and adds them to the local repository.
20
20
21 If the pulled changes add a new branch head, the head is
21 If the pulled changes add a new branch head, the head is
22 automatically merged, and the result of the merge is committed.
22 automatically merged, and the result of the merge is committed.
23 Otherwise, the working directory is updated to include the new
23 Otherwise, the working directory is updated to include the new
24 changes.
24 changes.
25
25
26 When a merge occurs, the newly pulled changes are assumed to be
26 When a merge occurs, the newly pulled changes are assumed to be
27 "authoritative". The head of the new changes is used as the first
27 "authoritative". The head of the new changes is used as the first
28 parent, with local changes as the second. To switch the merge
28 parent, with local changes as the second. To switch the merge
29 order, use --switch-parent.
29 order, use --switch-parent.
30
30
31 See 'hg help dates' for a list of formats valid for -d/--date.
31 See 'hg help dates' for a list of formats valid for -d/--date.
32 '''
32 '''
33
33
34 date = opts.get('date')
34 date = opts.get('date')
35 if date:
35 if date:
36 opts['date'] = util.parsedate(date)
36 opts['date'] = util.parsedate(date)
37
37
38 parent, p2 = repo.dirstate.parents()
38 parent, p2 = repo.dirstate.parents()
39 branch = repo.dirstate.branch()
39 branch = repo.dirstate.branch()
40 branchnode = repo.branchtags().get(branch)
40 branchnode = repo.branchtags().get(branch)
41 if parent != branchnode:
41 if parent != branchnode:
42 raise util.Abort(_('working dir not at branch tip '
42 raise util.Abort(_('working dir not at branch tip '
43 '(use "hg update" to check out branch tip)'))
43 '(use "hg update" to check out branch tip)'))
44
44
45 if p2 != nullid:
45 if p2 != nullid:
46 raise util.Abort(_('outstanding uncommitted merge'))
46 raise util.Abort(_('outstanding uncommitted merge'))
47
47
48 wlock = lock = None
48 wlock = lock = None
49 try:
49 try:
50 wlock = repo.wlock()
50 wlock = repo.wlock()
51 lock = repo.lock()
51 lock = repo.lock()
52 mod, add, rem, del_ = repo.status()[:4]
52 mod, add, rem, del_ = repo.status()[:4]
53
53
54 if mod or add or rem:
54 if mod or add or rem:
55 raise util.Abort(_('outstanding uncommitted changes'))
55 raise util.Abort(_('outstanding uncommitted changes'))
56 if del_:
56 if del_:
57 raise util.Abort(_('working directory is missing some files'))
57 raise util.Abort(_('working directory is missing some files'))
58 bheads = repo.branchheads(branch)
58 bheads = repo.branchheads(branch)
59 bheads = [head for head in bheads if len(repo[head].children()) == 0]
59 bheads = [head for head in bheads if len(repo[head].children()) == 0]
60 if len(bheads) > 1:
60 if len(bheads) > 1:
61 raise util.Abort(_('multiple heads in this branch '
61 raise util.Abort(_('multiple heads in this branch '
62 '(use "hg heads ." and "hg merge" to merge)'))
62 '(use "hg heads ." and "hg merge" to merge)'))
63
63
64 other = hg.repository(cmdutil.remoteui(repo, opts),
64 other = hg.repository(cmdutil.remoteui(repo, opts),
65 ui.expandpath(source))
65 ui.expandpath(source))
66 ui.status(_('pulling from %s\n') %
66 ui.status(_('pulling from %s\n') %
67 url.hidepassword(ui.expandpath(source)))
67 url.hidepassword(ui.expandpath(source)))
68 revs = None
68 revs = None
69 if opts['rev']:
69 if opts['rev']:
70 if not other.local():
70 if not other.local():
71 raise util.Abort(_("fetch -r doesn't work for remote "
71 raise util.Abort(_("fetch -r doesn't work for remote "
72 "repositories yet"))
72 "repositories yet"))
73 else:
73 else:
74 revs = [other.lookup(rev) for rev in opts['rev']]
74 revs = [other.lookup(rev) for rev in opts['rev']]
75
75
76 # Are there any changes at all?
76 # Are there any changes at all?
77 modheads = repo.pull(other, heads=revs)
77 modheads = repo.pull(other, heads=revs)
78 if modheads == 0:
78 if modheads == 0:
79 return 0
79 return 0
80
80
81 # Is this a simple fast-forward along the current branch?
81 # Is this a simple fast-forward along the current branch?
82 newheads = repo.branchheads(branch)
82 newheads = repo.branchheads(branch)
83 newheads = [head for head in newheads if len(repo[head].children()) == 0]
83 newheads = [head for head in newheads if len(repo[head].children()) == 0]
84 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
84 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
85 if len(newheads) == 1:
85 if len(newheads) == 1:
86 if newchildren[0] != parent:
86 if newchildren[0] != parent:
87 return hg.clean(repo, newchildren[0])
87 return hg.clean(repo, newchildren[0])
88 else:
88 else:
89 return
89 return
90
90
91 # Is there more than one additional branch head?
91 # Is there more than one additional branch head?
92 newchildren = [n for n in newchildren if n != parent]
92 newchildren = [n for n in newchildren if n != parent]
93 newparent = parent
93 newparent = parent
94 if newchildren:
94 if newchildren:
95 newparent = newchildren[0]
95 newparent = newchildren[0]
96 hg.clean(repo, newparent)
96 hg.clean(repo, newparent)
97 newheads = [n for n in newheads if n != newparent]
97 newheads = [n for n in newheads if n != newparent]
98 if len(newheads) > 1:
98 if len(newheads) > 1:
99 ui.status(_('not merging with %d other new branch heads '
99 ui.status(_('not merging with %d other new branch heads '
100 '(use "hg heads ." and "hg merge" to merge them)\n') %
100 '(use "hg heads ." and "hg merge" to merge them)\n') %
101 (len(newheads) - 1))
101 (len(newheads) - 1))
102 return
102 return
103
103
104 # Otherwise, let's merge.
104 # Otherwise, let's merge.
105 err = False
105 err = False
106 if newheads:
106 if newheads:
107 # By default, we consider the repository we're pulling
107 # By default, we consider the repository we're pulling
108 # *from* as authoritative, so we merge our changes into
108 # *from* as authoritative, so we merge our changes into
109 # theirs.
109 # theirs.
110 if opts['switch_parent']:
110 if opts['switch_parent']:
111 firstparent, secondparent = newparent, newheads[0]
111 firstparent, secondparent = newparent, newheads[0]
112 else:
112 else:
113 firstparent, secondparent = newheads[0], newparent
113 firstparent, secondparent = newheads[0], newparent
114 ui.status(_('updating to %d:%s\n') %
114 ui.status(_('updating to %d:%s\n') %
115 (repo.changelog.rev(firstparent),
115 (repo.changelog.rev(firstparent),
116 short(firstparent)))
116 short(firstparent)))
117 hg.clean(repo, firstparent)
117 hg.clean(repo, firstparent)
118 ui.status(_('merging with %d:%s\n') %
118 ui.status(_('merging with %d:%s\n') %
119 (repo.changelog.rev(secondparent), short(secondparent)))
119 (repo.changelog.rev(secondparent), short(secondparent)))
120 err = hg.merge(repo, secondparent, remind=False)
120 err = hg.merge(repo, secondparent, remind=False)
121
121
122 if not err:
122 if not err:
123 mod, add, rem = repo.status()[:3]
123 mod, add, rem = repo.status()[:3]
124 message = (cmdutil.logmessage(opts) or
124 message = (cmdutil.logmessage(opts) or
125 (_('Automated merge with %s') %
125 (_('Automated merge with %s') %
126 url.removeauth(other.url())))
126 url.removeauth(other.url())))
127 - force_editor = opts.get('force_editor') or opts.get('edit')
128 - n = repo.commit(mod + add + rem, message,
129 - opts['user'], opts['date'], force=True,
130 - force_editor=force_editor)
127 + editor = cmdutil.commiteditor
128 + if opts.get('force_editor') or opts.get('edit'):
129 + editor = cmdutil.commitforceeditor
130 + n = repo.commit(mod + add + rem, message, opts['user'],
131 + opts['date'], force=True, editor=editor)
131 ui.status(_('new changeset %d:%s merges remote changes '
132 ui.status(_('new changeset %d:%s merges remote changes '
132 'with local\n') % (repo.changelog.rev(n),
133 'with local\n') % (repo.changelog.rev(n),
133 short(n)))
134 short(n)))
134
135
135 finally:
136 finally:
136 release(lock, wlock)
137 release(lock, wlock)
137
138
138 cmdtable = {
139 cmdtable = {
139 'fetch':
140 'fetch':
140 (fetch,
141 (fetch,
141 [('r', 'rev', [], _('a specific revision you would like to pull')),
142 [('r', 'rev', [], _('a specific revision you would like to pull')),
142 ('e', 'edit', None, _('edit commit message')),
143 ('e', 'edit', None, _('edit commit message')),
143 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
144 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
144 ('', 'switch-parent', None, _('switch parents when merging')),
145 ('', 'switch-parent', None, _('switch parents when merging')),
145 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
146 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
146 _('hg fetch [SOURCE]')),
147 _('hg fetch [SOURCE]')),
147 }
148 }
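For context, a hedged sketch of typical invocations of the options registered in the cmdtable above, assuming the extension is enabled in hgrc; the comments paraphrase the docstring and flag descriptions:

    [extensions]
    fetch =

    $ hg fetch                 # pull from the default path; update or merge and commit
    $ hg fetch -e              # additionally open the commit message editor
    $ hg fetch --switch-parent # make the local changes the first parent of the merge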
@@ -1,536 +1,534 @@
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a DSCM
10 # Keyword expansion hack against the grain of a DSCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Setup in hgrc:
24 # Setup in hgrc:
25 #
25 #
26 # [extensions]
26 # [extensions]
27 # # enable extension
27 # # enable extension
28 # hgext.keyword =
28 # hgext.keyword =
29 #
29 #
30 # Files to act upon/ignore are specified in the [keyword] section.
30 # Files to act upon/ignore are specified in the [keyword] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
31 # Customized keyword template mappings in the [keywordmaps] section.
32 #
32 #
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34
34
35 '''keyword expansion in local repositories
35 '''keyword expansion in local repositories
36
36
37 This extension expands RCS/CVS-like or self-customized $Keywords$ in
37 This extension expands RCS/CVS-like or self-customized $Keywords$ in
38 tracked text files selected by your configuration.
38 tracked text files selected by your configuration.
39
39
40 Keywords are only expanded in local repositories and not stored in the
40 Keywords are only expanded in local repositories and not stored in the
41 change history. The mechanism can be regarded as a convenience for the
41 change history. The mechanism can be regarded as a convenience for the
42 current user or for archive distribution.
42 current user or for archive distribution.
43
43
44 Configuration is done in the [keyword] and [keywordmaps] sections of
44 Configuration is done in the [keyword] and [keywordmaps] sections of
45 hgrc files.
45 hgrc files.
46
46
47 Example:
47 Example:
48
48
49 [keyword]
49 [keyword]
50 # expand keywords in every python file except those matching "x*"
50 # expand keywords in every python file except those matching "x*"
51 **.py =
51 **.py =
52 x* = ignore
52 x* = ignore
53
53
54 Note: the more specific your filename patterns,
54 Note: the more specific your filename patterns,
55 the less speed you lose in huge repositories.
55 the less speed you lose in huge repositories.
56
56
57 For [keywordmaps] template mapping and expansion demonstration and
57 For [keywordmaps] template mapping and expansion demonstration and
58 control run "hg kwdemo".
58 control run "hg kwdemo".
59
59
60 An additional date template filter {date|utcdate} is provided.
60 An additional date template filter {date|utcdate} is provided.
61
61
62 The default template mappings (view with "hg kwdemo -d") can be
62 The default template mappings (view with "hg kwdemo -d") can be
63 replaced with customized keywords and templates. Again, run "hg
63 replaced with customized keywords and templates. Again, run "hg
64 kwdemo" to control the results of your config changes.
64 kwdemo" to control the results of your config changes.
65
65
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 the risk of inadvertently storing expanded keywords in the change
67 the risk of inadvertently storing expanded keywords in the change
68 history.
68 history.
69
69
70 To force expansion after enabling it, or a configuration change, run
70 To force expansion after enabling it, or a configuration change, run
71 "hg kwexpand".
71 "hg kwexpand".
72
72
73 Also, when committing with the record extension or using mq's qrecord,
73 Also, when committing with the record extension or using mq's qrecord,
74 be aware that keywords cannot be updated. Again, run "hg kwexpand" on
74 be aware that keywords cannot be updated. Again, run "hg kwexpand" on
75 the files in question to update keyword expansions after all changes
75 the files in question to update keyword expansions after all changes
76 have been checked in.
76 have been checked in.
77
77
78 Expansions spanning more than one line and incremental expansions,
78 Expansions spanning more than one line and incremental expansions,
79 like CVS' $Log$, are not supported. A keyword template map
79 like CVS' $Log$, are not supported. A keyword template map
80 "Log = {desc}" expands to the first line of the changeset description.
80 "Log = {desc}" expands to the first line of the changeset description.
81 '''
81 '''
82
82
83 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
83 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
84 from mercurial import patch, localrepo, templater, templatefilters, util
84 from mercurial import patch, localrepo, templater, templatefilters, util
85 from mercurial.hgweb import webcommands
85 from mercurial.hgweb import webcommands
86 from mercurial.lock import release
86 from mercurial.lock import release
87 from mercurial.node import nullid, hex
87 from mercurial.node import nullid, hex
88 from mercurial.i18n import _
88 from mercurial.i18n import _
89 import re, shutil, tempfile, time
89 import re, shutil, tempfile, time
90
90
91 commands.optionalrepo += ' kwdemo'
91 commands.optionalrepo += ' kwdemo'
92
92
93 # hg commands that do not act on keywords
93 # hg commands that do not act on keywords
94 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
94 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
95 ' log outgoing push rename rollback tip verify'
95 ' log outgoing push rename rollback tip verify'
96 ' convert email glog')
96 ' convert email glog')
97
97
98 # hg commands that trigger expansion only when writing to working dir,
98 # hg commands that trigger expansion only when writing to working dir,
99 # not when reading filelog, and unexpand when reading from working dir
99 # not when reading filelog, and unexpand when reading from working dir
100 restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
100 restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
101
101
102 def utcdate(date):
102 def utcdate(date):
103 '''Returns hgdate in cvs-like UTC format.'''
103 '''Returns hgdate in cvs-like UTC format.'''
104 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
104 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
105
105
106 # make keyword tools accessible
106 # make keyword tools accessible
107 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
107 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
108
108
109
109
110 class kwtemplater(object):
110 class kwtemplater(object):
111 '''
111 '''
112 Sets up keyword templates, corresponding keyword regex, and
112 Sets up keyword templates, corresponding keyword regex, and
113 provides keyword substitution functions.
113 provides keyword substitution functions.
114 '''
114 '''
115 templates = {
115 templates = {
116 'Revision': '{node|short}',
116 'Revision': '{node|short}',
117 'Author': '{author|user}',
117 'Author': '{author|user}',
118 'Date': '{date|utcdate}',
118 'Date': '{date|utcdate}',
119 'RCSFile': '{file|basename},v',
119 'RCSFile': '{file|basename},v',
120 'Source': '{root}/{file},v',
120 'Source': '{root}/{file},v',
121 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
121 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
122 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
122 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
123 }
123 }
124
124
125 def __init__(self, ui, repo):
125 def __init__(self, ui, repo):
126 self.ui = ui
126 self.ui = ui
127 self.repo = repo
127 self.repo = repo
128 self.matcher = util.matcher(repo.root,
128 self.matcher = util.matcher(repo.root,
129 inc=kwtools['inc'], exc=kwtools['exc'])[1]
129 inc=kwtools['inc'], exc=kwtools['exc'])[1]
130 self.restrict = kwtools['hgcmd'] in restricted.split()
130 self.restrict = kwtools['hgcmd'] in restricted.split()
131
131
132 kwmaps = self.ui.configitems('keywordmaps')
132 kwmaps = self.ui.configitems('keywordmaps')
133 if kwmaps: # override default templates
133 if kwmaps: # override default templates
134 kwmaps = [(k, templater.parsestring(v, False))
134 kwmaps = [(k, templater.parsestring(v, False))
135 for (k, v) in kwmaps]
135 for (k, v) in kwmaps]
136 self.templates = dict(kwmaps)
136 self.templates = dict(kwmaps)
137 escaped = map(re.escape, self.templates.keys())
137 escaped = map(re.escape, self.templates.keys())
138 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
138 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
139 self.re_kw = re.compile(kwpat)
139 self.re_kw = re.compile(kwpat)
140
140
141 templatefilters.filters['utcdate'] = utcdate
141 templatefilters.filters['utcdate'] = utcdate
142 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
142 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
143 False, None, '', False)
143 False, None, '', False)
144
144
145 def substitute(self, data, path, ctx, subfunc):
145 def substitute(self, data, path, ctx, subfunc):
146 '''Replaces keywords in data with expanded template.'''
146 '''Replaces keywords in data with expanded template.'''
147 def kwsub(mobj):
147 def kwsub(mobj):
148 kw = mobj.group(1)
148 kw = mobj.group(1)
149 self.ct.use_template(self.templates[kw])
149 self.ct.use_template(self.templates[kw])
150 self.ui.pushbuffer()
150 self.ui.pushbuffer()
151 self.ct.show(ctx, root=self.repo.root, file=path)
151 self.ct.show(ctx, root=self.repo.root, file=path)
152 ekw = templatefilters.firstline(self.ui.popbuffer())
152 ekw = templatefilters.firstline(self.ui.popbuffer())
153 return '$%s: %s $' % (kw, ekw)
153 return '$%s: %s $' % (kw, ekw)
154 return subfunc(kwsub, data)
154 return subfunc(kwsub, data)
155
155
156 def expand(self, path, node, data):
156 def expand(self, path, node, data):
157 '''Returns data with keywords expanded.'''
157 '''Returns data with keywords expanded.'''
158 if not self.restrict and self.matcher(path) and not util.binary(data):
158 if not self.restrict and self.matcher(path) and not util.binary(data):
159 ctx = self.repo.filectx(path, fileid=node).changectx()
159 ctx = self.repo.filectx(path, fileid=node).changectx()
160 return self.substitute(data, path, ctx, self.re_kw.sub)
160 return self.substitute(data, path, ctx, self.re_kw.sub)
161 return data
161 return data
162
162
163 def iskwfile(self, path, flagfunc):
163 def iskwfile(self, path, flagfunc):
164 '''Returns true if path matches [keyword] pattern
164 '''Returns true if path matches [keyword] pattern
165 and is not a symbolic link.
165 and is not a symbolic link.
166 Caveat: localrepository._link fails on Windows.'''
166 Caveat: localrepository._link fails on Windows.'''
167 return self.matcher(path) and not 'l' in flagfunc(path)
167 return self.matcher(path) and not 'l' in flagfunc(path)
168
168
169 def overwrite(self, node, expand, files):
169 def overwrite(self, node, expand, files):
170 '''Overwrites selected files expanding/shrinking keywords.'''
170 '''Overwrites selected files expanding/shrinking keywords.'''
171 ctx = self.repo[node]
171 ctx = self.repo[node]
172 mf = ctx.manifest()
172 mf = ctx.manifest()
173 if node is not None: # commit
173 if node is not None: # commit
174 files = [f for f in ctx.files() if f in mf]
174 files = [f for f in ctx.files() if f in mf]
175 notify = self.ui.debug
175 notify = self.ui.debug
176 else: # kwexpand/kwshrink
176 else: # kwexpand/kwshrink
177 notify = self.ui.note
177 notify = self.ui.note
178 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
178 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
179 if candidates:
179 if candidates:
180 self.restrict = True # do not expand when reading
180 self.restrict = True # do not expand when reading
181 msg = (expand and _('overwriting %s expanding keywords\n')
181 msg = (expand and _('overwriting %s expanding keywords\n')
182 or _('overwriting %s shrinking keywords\n'))
182 or _('overwriting %s shrinking keywords\n'))
183 for f in candidates:
183 for f in candidates:
184 fp = self.repo.file(f)
184 fp = self.repo.file(f)
185 data = fp.read(mf[f])
185 data = fp.read(mf[f])
186 if util.binary(data):
186 if util.binary(data):
187 continue
187 continue
188 if expand:
188 if expand:
189 if node is None:
189 if node is None:
190 ctx = self.repo.filectx(f, fileid=mf[f]).changectx()
190 ctx = self.repo.filectx(f, fileid=mf[f]).changectx()
191 data, found = self.substitute(data, f, ctx,
191 data, found = self.substitute(data, f, ctx,
192 self.re_kw.subn)
192 self.re_kw.subn)
193 else:
193 else:
194 found = self.re_kw.search(data)
194 found = self.re_kw.search(data)
195 if found:
195 if found:
196 notify(msg % f)
196 notify(msg % f)
197 self.repo.wwrite(f, data, mf.flags(f))
197 self.repo.wwrite(f, data, mf.flags(f))
198 self.repo.dirstate.normal(f)
198 self.repo.dirstate.normal(f)
199 self.restrict = False
199 self.restrict = False
200
200
201 def shrinktext(self, text):
201 def shrinktext(self, text):
202 '''Unconditionally removes all keyword substitutions from text.'''
202 '''Unconditionally removes all keyword substitutions from text.'''
203 return self.re_kw.sub(r'$\1$', text)
203 return self.re_kw.sub(r'$\1$', text)
204
204
205 def shrink(self, fname, text):
205 def shrink(self, fname, text):
206 '''Returns text with all keyword substitutions removed.'''
206 '''Returns text with all keyword substitutions removed.'''
207 if self.matcher(fname) and not util.binary(text):
207 if self.matcher(fname) and not util.binary(text):
208 return self.shrinktext(text)
208 return self.shrinktext(text)
209 return text
209 return text
210
210
211 def shrinklines(self, fname, lines):
211 def shrinklines(self, fname, lines):
212 '''Returns lines with keyword substitutions removed.'''
212 '''Returns lines with keyword substitutions removed.'''
213 if self.matcher(fname):
213 if self.matcher(fname):
214 text = ''.join(lines)
214 text = ''.join(lines)
215 if not util.binary(text):
215 if not util.binary(text):
216 return self.shrinktext(text).splitlines(True)
216 return self.shrinktext(text).splitlines(True)
217 return lines
217 return lines
218
218
219 def wread(self, fname, data):
219 def wread(self, fname, data):
220 '''If in restricted mode returns data read from wdir with
220 '''If in restricted mode returns data read from wdir with
221 keyword substitutions removed.'''
221 keyword substitutions removed.'''
222 return self.restrict and self.shrink(fname, data) or data
222 return self.restrict and self.shrink(fname, data) or data
223
223
224 class kwfilelog(filelog.filelog):
224 class kwfilelog(filelog.filelog):
225 '''
225 '''
226 Subclass of filelog to hook into its read, add, cmp methods.
226 Subclass of filelog to hook into its read, add, cmp methods.
227 Keywords are "stored" unexpanded, and processed on reading.
227 Keywords are "stored" unexpanded, and processed on reading.
228 '''
228 '''
229 def __init__(self, opener, kwt, path):
229 def __init__(self, opener, kwt, path):
230 super(kwfilelog, self).__init__(opener, path)
230 super(kwfilelog, self).__init__(opener, path)
231 self.kwt = kwt
231 self.kwt = kwt
232 self.path = path
232 self.path = path
233
233
234 def read(self, node):
234 def read(self, node):
235 '''Expands keywords when reading filelog.'''
235 '''Expands keywords when reading filelog.'''
236 data = super(kwfilelog, self).read(node)
236 data = super(kwfilelog, self).read(node)
237 return self.kwt.expand(self.path, node, data)
237 return self.kwt.expand(self.path, node, data)
238
238
239 def add(self, text, meta, tr, link, p1=None, p2=None):
239 def add(self, text, meta, tr, link, p1=None, p2=None):
240 '''Removes keyword substitutions when adding to filelog.'''
240 '''Removes keyword substitutions when adding to filelog.'''
241 text = self.kwt.shrink(self.path, text)
241 text = self.kwt.shrink(self.path, text)
242 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
242 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
243
243
244 def cmp(self, node, text):
244 def cmp(self, node, text):
245 '''Removes keyword substitutions for comparison.'''
245 '''Removes keyword substitutions for comparison.'''
246 text = self.kwt.shrink(self.path, text)
246 text = self.kwt.shrink(self.path, text)
247 if self.renamed(node):
247 if self.renamed(node):
248 t2 = super(kwfilelog, self).read(node)
248 t2 = super(kwfilelog, self).read(node)
249 return t2 != text
249 return t2 != text
250 return revlog.revlog.cmp(self, node, text)
250 return revlog.revlog.cmp(self, node, text)
251
251
252 def _status(ui, repo, kwt, unknown, *pats, **opts):
252 def _status(ui, repo, kwt, unknown, *pats, **opts):
253 '''Bails out if [keyword] configuration is not active.
253 '''Bails out if [keyword] configuration is not active.
254 Returns status of working directory.'''
254 Returns status of working directory.'''
255 if kwt:
255 if kwt:
256 matcher = cmdutil.match(repo, pats, opts)
256 matcher = cmdutil.match(repo, pats, opts)
257 return repo.status(match=matcher, unknown=unknown, clean=True)
257 return repo.status(match=matcher, unknown=unknown, clean=True)
258 if ui.configitems('keyword'):
258 if ui.configitems('keyword'):
259 raise util.Abort(_('[keyword] patterns cannot match'))
259 raise util.Abort(_('[keyword] patterns cannot match'))
260 raise util.Abort(_('no [keyword] patterns configured'))
260 raise util.Abort(_('no [keyword] patterns configured'))
261
261
262 def _kwfwrite(ui, repo, expand, *pats, **opts):
262 def _kwfwrite(ui, repo, expand, *pats, **opts):
263 '''Selects files and passes them to kwtemplater.overwrite.'''
263 '''Selects files and passes them to kwtemplater.overwrite.'''
264 if repo.dirstate.parents()[1] != nullid:
264 if repo.dirstate.parents()[1] != nullid:
265 raise util.Abort(_('outstanding uncommitted merge'))
265 raise util.Abort(_('outstanding uncommitted merge'))
266 kwt = kwtools['templater']
266 kwt = kwtools['templater']
267 status = _status(ui, repo, kwt, False, *pats, **opts)
267 status = _status(ui, repo, kwt, False, *pats, **opts)
268 modified, added, removed, deleted = status[:4]
268 modified, added, removed, deleted = status[:4]
269 if modified or added or removed or deleted:
269 if modified or added or removed or deleted:
270 raise util.Abort(_('outstanding uncommitted changes'))
270 raise util.Abort(_('outstanding uncommitted changes'))
271 wlock = lock = None
271 wlock = lock = None
272 try:
272 try:
273 wlock = repo.wlock()
273 wlock = repo.wlock()
274 lock = repo.lock()
274 lock = repo.lock()
275 kwt.overwrite(None, expand, status[6])
275 kwt.overwrite(None, expand, status[6])
276 finally:
276 finally:
277 release(lock, wlock)
277 release(lock, wlock)
278
278
279 def demo(ui, repo, *args, **opts):
279 def demo(ui, repo, *args, **opts):
280 '''print [keywordmaps] configuration and an expansion example
280 '''print [keywordmaps] configuration and an expansion example
281
281
282 Show current, custom, or default keyword template maps and their
282 Show current, custom, or default keyword template maps and their
283 expansion.
283 expansion.
284
284
285 Extend current configuration by specifying maps as arguments and
285 Extend current configuration by specifying maps as arguments and
286 optionally by reading from an additional hgrc file.
286 optionally by reading from an additional hgrc file.
287
287
288 Override current keyword template maps with "default" option.
288 Override current keyword template maps with "default" option.
289 '''
289 '''
290 def demostatus(stat):
290 def demostatus(stat):
291 ui.status(_('\n\t%s\n') % stat)
291 ui.status(_('\n\t%s\n') % stat)
292
292
293 def demoitems(section, items):
293 def demoitems(section, items):
294 ui.write('[%s]\n' % section)
294 ui.write('[%s]\n' % section)
295 for k, v in items:
295 for k, v in items:
296 ui.write('%s = %s\n' % (k, v))
296 ui.write('%s = %s\n' % (k, v))
297
297
298 msg = 'hg keyword config and expansion example'
298 msg = 'hg keyword config and expansion example'
299 kwstatus = 'current'
299 kwstatus = 'current'
300 fn = 'demo.txt'
300 fn = 'demo.txt'
301 branchname = 'demobranch'
301 branchname = 'demobranch'
302 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
302 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
303 ui.note(_('creating temporary repository at %s\n') % tmpdir)
303 ui.note(_('creating temporary repository at %s\n') % tmpdir)
304 repo = localrepo.localrepository(ui, tmpdir, True)
304 repo = localrepo.localrepository(ui, tmpdir, True)
305 ui.setconfig('keyword', fn, '')
305 ui.setconfig('keyword', fn, '')
306 if args or opts.get('rcfile'):
306 if args or opts.get('rcfile'):
307 kwstatus = 'custom'
307 kwstatus = 'custom'
308 if opts.get('rcfile'):
308 if opts.get('rcfile'):
309 ui.readconfig(opts.get('rcfile'))
309 ui.readconfig(opts.get('rcfile'))
310 if opts.get('default'):
310 if opts.get('default'):
311 kwstatus = 'default'
311 kwstatus = 'default'
312 kwmaps = kwtemplater.templates
312 kwmaps = kwtemplater.templates
313 if ui.configitems('keywordmaps'):
313 if ui.configitems('keywordmaps'):
314 # override maps from optional rcfile
314 # override maps from optional rcfile
315 for k, v in kwmaps.iteritems():
315 for k, v in kwmaps.iteritems():
316 ui.setconfig('keywordmaps', k, v)
316 ui.setconfig('keywordmaps', k, v)
317 elif args:
317 elif args:
318 # simulate hgrc parsing
318 # simulate hgrc parsing
319 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
319 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
320 fp = repo.opener('hgrc', 'w')
320 fp = repo.opener('hgrc', 'w')
321 fp.writelines(rcmaps)
321 fp.writelines(rcmaps)
322 fp.close()
322 fp.close()
323 ui.readconfig(repo.join('hgrc'))
323 ui.readconfig(repo.join('hgrc'))
324 if not opts.get('default'):
324 if not opts.get('default'):
325 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
325 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
326 uisetup(ui)
326 uisetup(ui)
327 reposetup(ui, repo)
327 reposetup(ui, repo)
328 for k, v in ui.configitems('extensions'):
328 for k, v in ui.configitems('extensions'):
329 if k.endswith('keyword'):
329 if k.endswith('keyword'):
330 extension = '%s = %s' % (k, v)
330 extension = '%s = %s' % (k, v)
331 break
331 break
332 demostatus('config using %s keyword template maps' % kwstatus)
332 demostatus('config using %s keyword template maps' % kwstatus)
333 ui.write('[extensions]\n%s\n' % extension)
333 ui.write('[extensions]\n%s\n' % extension)
334 demoitems('keyword', ui.configitems('keyword'))
334 demoitems('keyword', ui.configitems('keyword'))
335 demoitems('keywordmaps', kwmaps.iteritems())
335 demoitems('keywordmaps', kwmaps.iteritems())
336 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
336 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
337 repo.wopener(fn, 'w').write(keywords)
337 repo.wopener(fn, 'w').write(keywords)
338 repo.add([fn])
338 repo.add([fn])
339 path = repo.wjoin(fn)
339 path = repo.wjoin(fn)
340 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
340 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
341 ui.note(keywords)
341 ui.note(keywords)
342 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
342 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
343 # silence branch command if not verbose
343 # silence branch command if not verbose
344 quiet = ui.quiet
344 quiet = ui.quiet
345 ui.quiet = not ui.verbose
345 ui.quiet = not ui.verbose
346 commands.branch(ui, repo, branchname)
346 commands.branch(ui, repo, branchname)
347 ui.quiet = quiet
347 ui.quiet = quiet
348 for name, cmd in ui.configitems('hooks'):
348 for name, cmd in ui.configitems('hooks'):
349 if name.split('.', 1)[0].find('commit') > -1:
349 if name.split('.', 1)[0].find('commit') > -1:
350 repo.ui.setconfig('hooks', name, '')
350 repo.ui.setconfig('hooks', name, '')
351 ui.note(_('unhooked all commit hooks\n'))
351 ui.note(_('unhooked all commit hooks\n'))
352 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
352 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
353 repo.commit(text=msg)
353 repo.commit(text=msg)
354 fmt = ui.verbose and ' in %s' % path or ''
354 fmt = ui.verbose and ' in %s' % path or ''
355 demostatus('%s keywords expanded%s' % (kwstatus, fmt))
355 demostatus('%s keywords expanded%s' % (kwstatus, fmt))
356 ui.write(repo.wread(fn))
356 ui.write(repo.wread(fn))
357 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
357 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
358 shutil.rmtree(tmpdir, ignore_errors=True)
358 shutil.rmtree(tmpdir, ignore_errors=True)
359
359
360 def expand(ui, repo, *pats, **opts):
360 def expand(ui, repo, *pats, **opts):
361 '''expand keywords in working directory
361 '''expand keywords in working directory
362
362
363 Run after (re)enabling keyword expansion.
363 Run after (re)enabling keyword expansion.
364
364
365 kwexpand refuses to run if given files contain local changes.
365 kwexpand refuses to run if given files contain local changes.
366 '''
366 '''
367 # 3rd argument sets expansion to True
367 # 3rd argument sets expansion to True
368 _kwfwrite(ui, repo, True, *pats, **opts)
368 _kwfwrite(ui, repo, True, *pats, **opts)
369
369
370 def files(ui, repo, *pats, **opts):
370 def files(ui, repo, *pats, **opts):
371 '''print files currently configured for keyword expansion
371 '''print files currently configured for keyword expansion
372
372
373 Crosscheck which files in working directory are potential targets
373 Crosscheck which files in working directory are potential targets
374 for keyword expansion. That is, files matched by [keyword] config
374 for keyword expansion. That is, files matched by [keyword] config
375 patterns but not symlinks.
375 patterns but not symlinks.
376 '''
376 '''
377 kwt = kwtools['templater']
377 kwt = kwtools['templater']
378 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
378 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
379 modified, added, removed, deleted, unknown, ignored, clean = status
379 modified, added, removed, deleted, unknown, ignored, clean = status
380 files = sorted(modified + added + clean + unknown)
380 files = sorted(modified + added + clean + unknown)
381 wctx = repo[None]
381 wctx = repo[None]
382 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
382 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
383 cwd = pats and repo.getcwd() or ''
383 cwd = pats and repo.getcwd() or ''
384 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
384 kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
385 if opts.get('all') or opts.get('ignore'):
385 if opts.get('all') or opts.get('ignore'):
386 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
386 kwfstats += (('I', [f for f in files if f not in kwfiles]),)
387 for char, filenames in kwfstats:
387 for char, filenames in kwfstats:
388 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
388 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
389 for f in filenames:
389 for f in filenames:
390 ui.write(fmt % repo.pathto(f, cwd))
390 ui.write(fmt % repo.pathto(f, cwd))
391
391
392 def shrink(ui, repo, *pats, **opts):
392 def shrink(ui, repo, *pats, **opts):
393 '''revert expanded keywords in working directory
393 '''revert expanded keywords in working directory
394
394
395 Run before changing/disabling active keywords or if you experience
395 Run before changing/disabling active keywords or if you experience
396 problems with "hg import" or "hg merge".
396 problems with "hg import" or "hg merge".
397
397
398 kwshrink refuses to run if given files contain local changes.
398 kwshrink refuses to run if given files contain local changes.
399 '''
399 '''
400 # 3rd argument sets expansion to False
400 # 3rd argument sets expansion to False
401 _kwfwrite(ui, repo, False, *pats, **opts)
401 _kwfwrite(ui, repo, False, *pats, **opts)
402
402
403
403
404 def uisetup(ui):
404 def uisetup(ui):
405 '''Collects [keyword] config in kwtools.
405 '''Collects [keyword] config in kwtools.
406 Monkeypatches dispatch._parse if needed.'''
406 Monkeypatches dispatch._parse if needed.'''
407
407
408 for pat, opt in ui.configitems('keyword'):
408 for pat, opt in ui.configitems('keyword'):
409 if opt != 'ignore':
409 if opt != 'ignore':
410 kwtools['inc'].append(pat)
410 kwtools['inc'].append(pat)
411 else:
411 else:
412 kwtools['exc'].append(pat)
412 kwtools['exc'].append(pat)
413
413
414 if kwtools['inc']:
414 if kwtools['inc']:
415 def kwdispatch_parse(orig, ui, args):
415 def kwdispatch_parse(orig, ui, args):
416 '''Monkeypatch dispatch._parse to obtain running hg command.'''
416 '''Monkeypatch dispatch._parse to obtain running hg command.'''
417 cmd, func, args, options, cmdoptions = orig(ui, args)
417 cmd, func, args, options, cmdoptions = orig(ui, args)
418 kwtools['hgcmd'] = cmd
418 kwtools['hgcmd'] = cmd
419 return cmd, func, args, options, cmdoptions
419 return cmd, func, args, options, cmdoptions
420
420
421 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
421 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
422
422
423 def reposetup(ui, repo):
423 def reposetup(ui, repo):
424 '''Sets up repo as kwrepo for keyword substitution.
424 '''Sets up repo as kwrepo for keyword substitution.
425 Overrides file method to return kwfilelog instead of filelog
425 Overrides file method to return kwfilelog instead of filelog
426 if file matches user configuration.
426 if file matches user configuration.
427 Wraps commit to overwrite configured files with updated
427 Wraps commit to overwrite configured files with updated
428 keyword substitutions.
428 keyword substitutions.
429 Monkeypatches patch and webcommands.'''
429 Monkeypatches patch and webcommands.'''
430
430
431 try:
431 try:
432 if (not repo.local() or not kwtools['inc']
432 if (not repo.local() or not kwtools['inc']
433 or kwtools['hgcmd'] in nokwcommands.split()
433 or kwtools['hgcmd'] in nokwcommands.split()
434 or '.hg' in util.splitpath(repo.root)
434 or '.hg' in util.splitpath(repo.root)
435 or repo._url.startswith('bundle:')):
435 or repo._url.startswith('bundle:')):
436 return
436 return
437 except AttributeError:
437 except AttributeError:
438 pass
438 pass
439
439
440 kwtools['templater'] = kwt = kwtemplater(ui, repo)
440 kwtools['templater'] = kwt = kwtemplater(ui, repo)
441
441
442 class kwrepo(repo.__class__):
442 class kwrepo(repo.__class__):
443 def file(self, f):
443 def file(self, f):
444 if f[0] == '/':
444 if f[0] == '/':
445 f = f[1:]
445 f = f[1:]
446 return kwfilelog(self.sopener, kwt, f)
446 return kwfilelog(self.sopener, kwt, f)
447
447
448 def wread(self, filename):
448 def wread(self, filename):
449 data = super(kwrepo, self).wread(filename)
449 data = super(kwrepo, self).wread(filename)
450 return kwt.wread(filename, data)
450 return kwt.wread(filename, data)
451
451
452 def commit(self, files=None, text='', user=None, date=None,
452 def commit(self, files=None, text='', user=None, date=None,
453 - match=None, force=False, force_editor=False,
454 - extra={}, empty_ok=False):
453 + match=None, force=False, editor=None, extra={}):
455 wlock = lock = None
454 wlock = lock = None
456 _p1 = _p2 = None
455 _p1 = _p2 = None
457 try:
456 try:
458 wlock = self.wlock()
457 wlock = self.wlock()
459 lock = self.lock()
458 lock = self.lock()
460 # store and postpone commit hooks
459 # store and postpone commit hooks
461 commithooks = {}
460 commithooks = {}
462 for name, cmd in ui.configitems('hooks'):
461 for name, cmd in ui.configitems('hooks'):
463 if name.split('.', 1)[0] == 'commit':
462 if name.split('.', 1)[0] == 'commit':
464 commithooks[name] = cmd
463 commithooks[name] = cmd
465 ui.setconfig('hooks', name, None)
464 ui.setconfig('hooks', name, None)
466 if commithooks:
465 if commithooks:
467 # store parents for commit hook environment
466 # store parents for commit hook environment
468 _p1, _p2 = repo.dirstate.parents()
467 _p1, _p2 = repo.dirstate.parents()
469 _p1 = hex(_p1)
468 _p1 = hex(_p1)
470 if _p2 == nullid:
469 if _p2 == nullid:
471 _p2 = ''
470 _p2 = ''
472 else:
471 else:
473 _p2 = hex(_p2)
472 _p2 = hex(_p2)
474
473
475 n = super(kwrepo, self).commit(files, text, user, date, match,
474 n = super(kwrepo, self).commit(files, text, user, date, match,
476 - force, force_editor,
477 - extra, empty_ok)
475 + force, editor, extra)
478
476
479 # restore commit hooks
477 # restore commit hooks
480 for name, cmd in commithooks.iteritems():
478 for name, cmd in commithooks.iteritems():
481 ui.setconfig('hooks', name, cmd)
479 ui.setconfig('hooks', name, cmd)
482 if n is not None:
480 if n is not None:
483 kwt.overwrite(n, True, None)
481 kwt.overwrite(n, True, None)
484 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
482 repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
485 return n
483 return n
486 finally:
484 finally:
487 release(lock, wlock)
485 release(lock, wlock)
488
486
489 # monkeypatches
487 # monkeypatches
490 def kwpatchfile_init(orig, self, ui, fname, opener, missing=False):
488 def kwpatchfile_init(orig, self, ui, fname, opener, missing=False):
491 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
489 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
492 rejects or conflicts due to expanded keywords in working dir.'''
490 rejects or conflicts due to expanded keywords in working dir.'''
493 orig(self, ui, fname, opener, missing)
491 orig(self, ui, fname, opener, missing)
494 # shrink keywords read from working dir
492 # shrink keywords read from working dir
495 self.lines = kwt.shrinklines(self.fname, self.lines)
493 self.lines = kwt.shrinklines(self.fname, self.lines)
496
494
497 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
495 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
498 opts=None):
496 opts=None):
499 '''Monkeypatch patch.diff to avoid expansion except when
497 '''Monkeypatch patch.diff to avoid expansion except when
500 comparing against working dir.'''
498 comparing against working dir.'''
501 if node2 is not None:
499 if node2 is not None:
502 kwt.matcher = util.never
500 kwt.matcher = util.never
503 elif node1 is not None and node1 != repo['.'].node():
501 elif node1 is not None and node1 != repo['.'].node():
504 kwt.restrict = True
502 kwt.restrict = True
505 return orig(repo, node1, node2, match, changes, opts)
503 return orig(repo, node1, node2, match, changes, opts)
506
504
507 def kwweb_skip(orig, web, req, tmpl):
505 def kwweb_skip(orig, web, req, tmpl):
508 '''Wraps webcommands.x turning off keyword expansion.'''
506 '''Wraps webcommands.x turning off keyword expansion.'''
509 kwt.matcher = util.never
507 kwt.matcher = util.never
510 return orig(web, req, tmpl)
508 return orig(web, req, tmpl)
511
509
512 repo.__class__ = kwrepo
510 repo.__class__ = kwrepo
513
511
514 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
512 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
515 extensions.wrapfunction(patch, 'diff', kw_diff)
513 extensions.wrapfunction(patch, 'diff', kw_diff)
516 for c in 'annotate changeset rev filediff diff'.split():
514 for c in 'annotate changeset rev filediff diff'.split():
517 extensions.wrapfunction(webcommands, c, kwweb_skip)
515 extensions.wrapfunction(webcommands, c, kwweb_skip)
518
516
519 cmdtable = {
517 cmdtable = {
520 'kwdemo':
518 'kwdemo':
521 (demo,
519 (demo,
522 [('d', 'default', None, _('show default keyword template maps')),
520 [('d', 'default', None, _('show default keyword template maps')),
523 ('f', 'rcfile', [], _('read maps from rcfile'))],
521 ('f', 'rcfile', [], _('read maps from rcfile'))],
524 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
522 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
525 'kwexpand': (expand, commands.walkopts,
523 'kwexpand': (expand, commands.walkopts,
526 _('hg kwexpand [OPTION]... [FILE]...')),
524 _('hg kwexpand [OPTION]... [FILE]...')),
527 'kwfiles':
525 'kwfiles':
528 (files,
526 (files,
529 [('a', 'all', None, _('show keyword status flags of all files')),
527 [('a', 'all', None, _('show keyword status flags of all files')),
530 ('i', 'ignore', None, _('show files excluded from expansion')),
528 ('i', 'ignore', None, _('show files excluded from expansion')),
531 ('u', 'untracked', None, _('additionally show untracked files')),
529 ('u', 'untracked', None, _('additionally show untracked files')),
532 ] + commands.walkopts,
530 ] + commands.walkopts,
533 _('hg kwfiles [OPTION]... [FILE]...')),
531 _('hg kwfiles [OPTION]... [FILE]...')),
534 'kwshrink': (shrink, commands.walkopts,
532 'kwshrink': (shrink, commands.walkopts,
535 _('hg kwshrink [OPTION]... [FILE]...')),
533 _('hg kwshrink [OPTION]... [FILE]...')),
536 }
534 }
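A hedged configuration sketch drawing on the docstring and default template maps above; the file pattern and the choice of keywords are illustrative, and "hg kwdemo" previews the effect before you adopt it. Note that a [keywordmaps] section replaces the default maps entirely:

    [extensions]
    hgext.keyword =

    [keyword]
    # expand keywords in tracked Python files only
    **.py =

    [keywordmaps]
    # overrides the defaults; keep just two keywords
    Revision = {node|short}
    Log = {desc}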
@@ -1,1223 +1,1260 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat, errno
10 import os, sys, bisect, stat, errno
11 import mdiff, bdiff, util, templater, patch, error, encoding
11 import mdiff, bdiff, util, templater, patch, error, encoding
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
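A minimal sketch of how findpossible/findcmd resolve command abbreviations, using an invented two-entry table (not part of this file; real tables map "name|alias" keys to (function, options, synopsis) tuples, as in the cmdtable definitions above):

    # hypothetical table for illustration only
    table = {'status|st': (lambda ui, repo: None, [], 'hg status'),
             'summary': (lambda ui, repo: None, [], 'hg summary')}

    aliases, entry = findcmd('st', table, strict=False)   # exact alias -> (['status', 'st'], ...)
    aliases, entry = findcmd('sum', table, strict=False)  # unique prefix of 'summary'
    # findcmd('s', table, strict=False) raises error.AmbiguousCommand,
    # because 's' abbreviates both 'status' and 'summary'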
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
104 dst = src.baseui # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src # keep all global options
107 dst = src # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
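# Illustrative sketch (not part of cmdutil.py): revpair() above resolves the
# -r arguments of diff-like commands to a pair of nodes; with no revisions the
# second item is None, meaning the working directory. The helper name
# _demo_revpair is hypothetical and the 'A:B' form assumes revrangesep is the
# usual ':' separator defined elsewhere in this module.
def _demo_revpair(repo):
    base, target = revpair(repo, ['2:5'])   # nodes of revisions 2 and 5
    wdbase, wdir = revpair(repo, [])        # wdir is None: working directory
    return base, target, wdbase, wdir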
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = set(), []
157 seen, l = set(), []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen.add(rev)
167 seen.add(rev)
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen.add(rev)
173 seen.add(rev)
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
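# Illustrative sketch (not part of cmdutil.py): revrange() above expands a
# mixed list of single revisions and 'A:B' ranges into a duplicate-free list
# of revision numbers; a reversed range such as '5:2' yields its revisions in
# descending order. The helper name _demo_revrange is hypothetical.
def _demo_revrange(repo):
    return revrange(repo, ['0:2', 'tip'])   # e.g. [0, 1, 2, <tip rev>]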
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 inst.args[0])
221 inst.args[0])
222
222
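# Illustrative sketch (not part of cmdutil.py): make_filename() above expands
# printf-like specifiers ('%h' short hash, '%R'/'%r' revision number, '%b'
# basename of the repository root, '%n'/'%N' sequence number and total,
# '%s'/'%d'/'%p' pieces of the file path) into an output file name. The helper
# name _demo_export_name is hypothetical.
def _demo_export_name(repo, node, seqno, total):
    # yields something like 'myrepo-03-0123456789ab.patch' for the third of
    # ten exported changesets
    return make_filename(repo, '%b-%n-%h.patch', node,
                         total=total, seqno=seqno)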
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
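# Illustrative sketch (not part of cmdutil.py): make_file() above returns an
# already-open file object: stdout/stdin for '-' or an empty pattern, the
# object itself if it is already file-like, and otherwise a file opened under
# the name produced by make_filename(). The helper name _demo_write_patch is
# hypothetical.
def _demo_write_patch(repo, node, data, pat='%h.patch'):
    fp = make_file(repo, pat, node, mode='wb')
    try:
        fp.write(data)
    finally:
        if fp not in (sys.stdout, sys.stdin):
            fp.close()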
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 if not globbed and default == 'relpath':
239 if not globbed and default == 'relpath':
240 pats = util.expand_glob(pats or [])
240 pats = util.expand_glob(pats or [])
241 m = _match.match(repo.root, repo.getcwd(), pats,
241 m = _match.match(repo.root, repo.getcwd(), pats,
242 opts.get('include'), opts.get('exclude'), default)
242 opts.get('include'), opts.get('exclude'), default)
243 def badfn(f, msg):
243 def badfn(f, msg):
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 return False
245 return False
246 m.bad = badfn
246 m.bad = badfn
247 return m
247 return m
248
248
249 def matchall(repo):
249 def matchall(repo):
250 return _match.always(repo.root, repo.getcwd())
250 return _match.always(repo.root, repo.getcwd())
251
251
252 def matchfiles(repo, files):
252 def matchfiles(repo, files):
253 return _match.exact(repo.root, repo.getcwd(), files)
253 return _match.exact(repo.root, repo.getcwd(), files)
254
254
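# Illustrative sketch (not part of cmdutil.py): the three helpers above build
# matcher objects: match() from command-line patterns plus the -I/-X options,
# matchall() matching every path in the repository, and matchfiles() matching
# an exact file list. The helper name _demo_walk is hypothetical.
def _demo_walk(repo, pats, opts):
    m = match(repo, pats, opts)         # honours opts['include']/['exclude']
    return sorted(repo.walk(m))         # repository-relative file names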
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
255 def findrenames(repo, added=None, removed=None, threshold=0.5):
256 '''find renamed files -- yields (before, after, score) tuples'''
256 '''find renamed files -- yields (before, after, score) tuples'''
257 if added is None or removed is None:
257 if added is None or removed is None:
258 added, removed = repo.status()[1:3]
258 added, removed = repo.status()[1:3]
259 ctx = repo['.']
259 ctx = repo['.']
260 for a in added:
260 for a in added:
261 aa = repo.wread(a)
261 aa = repo.wread(a)
262 bestname, bestscore = None, threshold
262 bestname, bestscore = None, threshold
263 for r in removed:
263 for r in removed:
264 rr = ctx.filectx(r).data()
264 rr = ctx.filectx(r).data()
265
265
266 # bdiff.blocks() returns blocks of matching lines
266 # bdiff.blocks() returns blocks of matching lines
267 # count the number of bytes in each
267 # count the number of bytes in each
268 equal = 0
268 equal = 0
269 alines = mdiff.splitnewlines(aa)
269 alines = mdiff.splitnewlines(aa)
270 matches = bdiff.blocks(aa, rr)
270 matches = bdiff.blocks(aa, rr)
271 for x1,x2,y1,y2 in matches:
271 for x1,x2,y1,y2 in matches:
272 for line in alines[x1:x2]:
272 for line in alines[x1:x2]:
273 equal += len(line)
273 equal += len(line)
274
274
275 lengths = len(aa) + len(rr)
275 lengths = len(aa) + len(rr)
276 if lengths:
276 if lengths:
277 myscore = equal*2.0 / lengths
277 myscore = equal*2.0 / lengths
278 if myscore >= bestscore:
278 if myscore >= bestscore:
279 bestname, bestscore = r, myscore
279 bestname, bestscore = r, myscore
280 if bestname:
280 if bestname:
281 yield bestname, a, bestscore
281 yield bestname, a, bestscore
282
282
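# Illustrative sketch (not part of cmdutil.py): findrenames() above scores an
# (added, removed) pair as 2 * matching_bytes / (len(added) + len(removed)),
# so 1.0 means byte-identical content, and only the best match at or above the
# threshold is reported for each added file. The helper name _demo_renames is
# hypothetical.
def _demo_renames(repo):
    # with the default threshold of 0.5, report pairs sharing at least half
    # of their bytes
    return [(old, new, int(score * 100))
            for old, new, score in findrenames(repo)]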
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
283 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
284 if dry_run is None:
284 if dry_run is None:
285 dry_run = opts.get('dry_run')
285 dry_run = opts.get('dry_run')
286 if similarity is None:
286 if similarity is None:
287 similarity = float(opts.get('similarity') or 0)
287 similarity = float(opts.get('similarity') or 0)
288 add, remove = [], []
288 add, remove = [], []
289 mapping = {}
289 mapping = {}
290 audit_path = util.path_auditor(repo.root)
290 audit_path = util.path_auditor(repo.root)
291 m = match(repo, pats, opts)
291 m = match(repo, pats, opts)
292 for abs in repo.walk(m):
292 for abs in repo.walk(m):
293 target = repo.wjoin(abs)
293 target = repo.wjoin(abs)
294 good = True
294 good = True
295 try:
295 try:
296 audit_path(abs)
296 audit_path(abs)
297 except:
297 except:
298 good = False
298 good = False
299 rel = m.rel(abs)
299 rel = m.rel(abs)
300 exact = m.exact(abs)
300 exact = m.exact(abs)
301 if good and abs not in repo.dirstate:
301 if good and abs not in repo.dirstate:
302 add.append(abs)
302 add.append(abs)
303 mapping[abs] = rel, m.exact(abs)
303 mapping[abs] = rel, m.exact(abs)
304 if repo.ui.verbose or not exact:
304 if repo.ui.verbose or not exact:
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
305 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
306 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
307 or (os.path.isdir(target) and not os.path.islink(target))):
307 or (os.path.isdir(target) and not os.path.islink(target))):
308 remove.append(abs)
308 remove.append(abs)
309 mapping[abs] = rel, exact
309 mapping[abs] = rel, exact
310 if repo.ui.verbose or not exact:
310 if repo.ui.verbose or not exact:
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
311 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
312 if not dry_run:
312 if not dry_run:
313 repo.remove(remove)
313 repo.remove(remove)
314 repo.add(add)
314 repo.add(add)
315 if similarity > 0:
315 if similarity > 0:
316 for old, new, score in findrenames(repo, add, remove, similarity):
316 for old, new, score in findrenames(repo, add, remove, similarity):
317 oldrel, oldexact = mapping[old]
317 oldrel, oldexact = mapping[old]
318 newrel, newexact = mapping[new]
318 newrel, newexact = mapping[new]
319 if repo.ui.verbose or not oldexact or not newexact:
319 if repo.ui.verbose or not oldexact or not newexact:
320 repo.ui.status(_('recording removal of %s as rename to %s '
320 repo.ui.status(_('recording removal of %s as rename to %s '
321 '(%d%% similar)\n') %
321 '(%d%% similar)\n') %
322 (oldrel, newrel, score * 100))
322 (oldrel, newrel, score * 100))
323 if not dry_run:
323 if not dry_run:
324 repo.copy(old, new)
324 repo.copy(old, new)
325
325
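# Illustrative sketch (not part of cmdutil.py): addremove() above schedules
# unknown files for addition, missing files for removal and, when a similarity
# threshold is given, records likely renames as copies. The helper name
# _demo_addremove is hypothetical.
def _demo_addremove(repo):
    # dry run: report what would be added/removed/renamed, recording nothing
    addremove(repo, pats=[], opts={}, dry_run=True, similarity=0.75)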
326 def copy(ui, repo, pats, opts, rename=False):
326 def copy(ui, repo, pats, opts, rename=False):
327 # called with the repo lock held
327 # called with the repo lock held
328 #
328 #
329 # hgsep => pathname that uses "/" to separate directories
329 # hgsep => pathname that uses "/" to separate directories
330 # ossep => pathname that uses os.sep to separate directories
330 # ossep => pathname that uses os.sep to separate directories
331 cwd = repo.getcwd()
331 cwd = repo.getcwd()
332 targets = {}
332 targets = {}
333 after = opts.get("after")
333 after = opts.get("after")
334 dryrun = opts.get("dry_run")
334 dryrun = opts.get("dry_run")
335
335
336 def walkpat(pat):
336 def walkpat(pat):
337 srcs = []
337 srcs = []
338 m = match(repo, [pat], opts, globbed=True)
338 m = match(repo, [pat], opts, globbed=True)
339 for abs in repo.walk(m):
339 for abs in repo.walk(m):
340 state = repo.dirstate[abs]
340 state = repo.dirstate[abs]
341 rel = m.rel(abs)
341 rel = m.rel(abs)
342 exact = m.exact(abs)
342 exact = m.exact(abs)
343 if state in '?r':
343 if state in '?r':
344 if exact and state == '?':
344 if exact and state == '?':
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
345 ui.warn(_('%s: not copying - file is not managed\n') % rel)
346 if exact and state == 'r':
346 if exact and state == 'r':
347 ui.warn(_('%s: not copying - file has been marked for'
347 ui.warn(_('%s: not copying - file has been marked for'
348 ' remove\n') % rel)
348 ' remove\n') % rel)
349 continue
349 continue
350 # abs: hgsep
350 # abs: hgsep
351 # rel: ossep
351 # rel: ossep
352 srcs.append((abs, rel, exact))
352 srcs.append((abs, rel, exact))
353 return srcs
353 return srcs
354
354
355 # abssrc: hgsep
355 # abssrc: hgsep
356 # relsrc: ossep
356 # relsrc: ossep
357 # otarget: ossep
357 # otarget: ossep
358 def copyfile(abssrc, relsrc, otarget, exact):
358 def copyfile(abssrc, relsrc, otarget, exact):
359 abstarget = util.canonpath(repo.root, cwd, otarget)
359 abstarget = util.canonpath(repo.root, cwd, otarget)
360 reltarget = repo.pathto(abstarget, cwd)
360 reltarget = repo.pathto(abstarget, cwd)
361 target = repo.wjoin(abstarget)
361 target = repo.wjoin(abstarget)
362 src = repo.wjoin(abssrc)
362 src = repo.wjoin(abssrc)
363 state = repo.dirstate[abstarget]
363 state = repo.dirstate[abstarget]
364
364
365 # check for collisions
365 # check for collisions
366 prevsrc = targets.get(abstarget)
366 prevsrc = targets.get(abstarget)
367 if prevsrc is not None:
367 if prevsrc is not None:
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
368 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
369 (reltarget, repo.pathto(abssrc, cwd),
369 (reltarget, repo.pathto(abssrc, cwd),
370 repo.pathto(prevsrc, cwd)))
370 repo.pathto(prevsrc, cwd)))
371 return
371 return
372
372
373 # check for overwrites
373 # check for overwrites
374 exists = os.path.exists(target)
374 exists = os.path.exists(target)
375 if (not after and exists) or (after and state in 'mn'):
375 if (not after and exists) or (after and state in 'mn'):
376 if not opts['force']:
376 if not opts['force']:
377 ui.warn(_('%s: not overwriting - file exists\n') %
377 ui.warn(_('%s: not overwriting - file exists\n') %
378 reltarget)
378 reltarget)
379 return
379 return
380
380
381 if after:
381 if after:
382 if not exists:
382 if not exists:
383 return
383 return
384 elif not dryrun:
384 elif not dryrun:
385 try:
385 try:
386 if exists:
386 if exists:
387 os.unlink(target)
387 os.unlink(target)
388 targetdir = os.path.dirname(target) or '.'
388 targetdir = os.path.dirname(target) or '.'
389 if not os.path.isdir(targetdir):
389 if not os.path.isdir(targetdir):
390 os.makedirs(targetdir)
390 os.makedirs(targetdir)
391 util.copyfile(src, target)
391 util.copyfile(src, target)
392 except IOError, inst:
392 except IOError, inst:
393 if inst.errno == errno.ENOENT:
393 if inst.errno == errno.ENOENT:
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
394 ui.warn(_('%s: deleted in working copy\n') % relsrc)
395 else:
395 else:
396 ui.warn(_('%s: cannot copy - %s\n') %
396 ui.warn(_('%s: cannot copy - %s\n') %
397 (relsrc, inst.strerror))
397 (relsrc, inst.strerror))
398 return True # report a failure
398 return True # report a failure
399
399
400 if ui.verbose or not exact:
400 if ui.verbose or not exact:
401 if rename:
401 if rename:
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
402 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
403 else:
403 else:
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
404 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
405
405
406 targets[abstarget] = abssrc
406 targets[abstarget] = abssrc
407
407
408 # fix up dirstate
408 # fix up dirstate
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
409 origsrc = repo.dirstate.copied(abssrc) or abssrc
410 if abstarget == origsrc: # copying back a copy?
410 if abstarget == origsrc: # copying back a copy?
411 if state not in 'mn' and not dryrun:
411 if state not in 'mn' and not dryrun:
412 repo.dirstate.normallookup(abstarget)
412 repo.dirstate.normallookup(abstarget)
413 else:
413 else:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
414 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
415 if not ui.quiet:
415 if not ui.quiet:
416 ui.warn(_("%s has not been committed yet, so no copy "
416 ui.warn(_("%s has not been committed yet, so no copy "
417 "data will be stored for %s.\n")
417 "data will be stored for %s.\n")
418 % (repo.pathto(origsrc, cwd), reltarget))
418 % (repo.pathto(origsrc, cwd), reltarget))
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
419 if repo.dirstate[abstarget] in '?r' and not dryrun:
420 repo.add([abstarget])
420 repo.add([abstarget])
421 elif not dryrun:
421 elif not dryrun:
422 repo.copy(origsrc, abstarget)
422 repo.copy(origsrc, abstarget)
423
423
424 if rename and not dryrun:
424 if rename and not dryrun:
425 repo.remove([abssrc], not after)
425 repo.remove([abssrc], not after)
426
426
427 # pat: ossep
427 # pat: ossep
428 # dest: ossep
428 # dest: ossep
429 # srcs: list of (hgsep, hgsep, ossep, bool)
429 # srcs: list of (hgsep, hgsep, ossep, bool)
430 # return: function that takes hgsep and returns ossep
430 # return: function that takes hgsep and returns ossep
431 def targetpathfn(pat, dest, srcs):
431 def targetpathfn(pat, dest, srcs):
432 if os.path.isdir(pat):
432 if os.path.isdir(pat):
433 abspfx = util.canonpath(repo.root, cwd, pat)
433 abspfx = util.canonpath(repo.root, cwd, pat)
434 abspfx = util.localpath(abspfx)
434 abspfx = util.localpath(abspfx)
435 if destdirexists:
435 if destdirexists:
436 striplen = len(os.path.split(abspfx)[0])
436 striplen = len(os.path.split(abspfx)[0])
437 else:
437 else:
438 striplen = len(abspfx)
438 striplen = len(abspfx)
439 if striplen:
439 if striplen:
440 striplen += len(os.sep)
440 striplen += len(os.sep)
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
441 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
442 elif destdirexists:
442 elif destdirexists:
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 os.path.basename(util.localpath(p)))
444 os.path.basename(util.localpath(p)))
445 else:
445 else:
446 res = lambda p: dest
446 res = lambda p: dest
447 return res
447 return res
448
448
449 # pat: ossep
449 # pat: ossep
450 # dest: ossep
450 # dest: ossep
451 # srcs: list of (hgsep, hgsep, ossep, bool)
451 # srcs: list of (hgsep, hgsep, ossep, bool)
452 # return: function that takes hgsep and returns ossep
452 # return: function that takes hgsep and returns ossep
453 def targetpathafterfn(pat, dest, srcs):
453 def targetpathafterfn(pat, dest, srcs):
454 if util.patkind(pat, None)[0]:
454 if util.patkind(pat, None)[0]:
455 # a mercurial pattern
455 # a mercurial pattern
456 res = lambda p: os.path.join(dest,
456 res = lambda p: os.path.join(dest,
457 os.path.basename(util.localpath(p)))
457 os.path.basename(util.localpath(p)))
458 else:
458 else:
459 abspfx = util.canonpath(repo.root, cwd, pat)
459 abspfx = util.canonpath(repo.root, cwd, pat)
460 if len(abspfx) < len(srcs[0][0]):
460 if len(abspfx) < len(srcs[0][0]):
461 # A directory. Either the target path contains the last
461 # A directory. Either the target path contains the last
462 # component of the source path or it does not.
462 # component of the source path or it does not.
463 def evalpath(striplen):
463 def evalpath(striplen):
464 score = 0
464 score = 0
465 for s in srcs:
465 for s in srcs:
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
466 t = os.path.join(dest, util.localpath(s[0])[striplen:])
467 if os.path.exists(t):
467 if os.path.exists(t):
468 score += 1
468 score += 1
469 return score
469 return score
470
470
471 abspfx = util.localpath(abspfx)
471 abspfx = util.localpath(abspfx)
472 striplen = len(abspfx)
472 striplen = len(abspfx)
473 if striplen:
473 if striplen:
474 striplen += len(os.sep)
474 striplen += len(os.sep)
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
475 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
476 score = evalpath(striplen)
476 score = evalpath(striplen)
477 striplen1 = len(os.path.split(abspfx)[0])
477 striplen1 = len(os.path.split(abspfx)[0])
478 if striplen1:
478 if striplen1:
479 striplen1 += len(os.sep)
479 striplen1 += len(os.sep)
480 if evalpath(striplen1) > score:
480 if evalpath(striplen1) > score:
481 striplen = striplen1
481 striplen = striplen1
482 res = lambda p: os.path.join(dest,
482 res = lambda p: os.path.join(dest,
483 util.localpath(p)[striplen:])
483 util.localpath(p)[striplen:])
484 else:
484 else:
485 # a file
485 # a file
486 if destdirexists:
486 if destdirexists:
487 res = lambda p: os.path.join(dest,
487 res = lambda p: os.path.join(dest,
488 os.path.basename(util.localpath(p)))
488 os.path.basename(util.localpath(p)))
489 else:
489 else:
490 res = lambda p: dest
490 res = lambda p: dest
491 return res
491 return res
492
492
493
493
494 pats = util.expand_glob(pats)
494 pats = util.expand_glob(pats)
495 if not pats:
495 if not pats:
496 raise util.Abort(_('no source or destination specified'))
496 raise util.Abort(_('no source or destination specified'))
497 if len(pats) == 1:
497 if len(pats) == 1:
498 raise util.Abort(_('no destination specified'))
498 raise util.Abort(_('no destination specified'))
499 dest = pats.pop()
499 dest = pats.pop()
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
500 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
501 if not destdirexists:
501 if not destdirexists:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
502 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
503 raise util.Abort(_('with multiple sources, destination must be an '
503 raise util.Abort(_('with multiple sources, destination must be an '
504 'existing directory'))
504 'existing directory'))
505 if util.endswithsep(dest):
505 if util.endswithsep(dest):
506 raise util.Abort(_('destination %s is not a directory') % dest)
506 raise util.Abort(_('destination %s is not a directory') % dest)
507
507
508 tfn = targetpathfn
508 tfn = targetpathfn
509 if after:
509 if after:
510 tfn = targetpathafterfn
510 tfn = targetpathafterfn
511 copylist = []
511 copylist = []
512 for pat in pats:
512 for pat in pats:
513 srcs = walkpat(pat)
513 srcs = walkpat(pat)
514 if not srcs:
514 if not srcs:
515 continue
515 continue
516 copylist.append((tfn(pat, dest, srcs), srcs))
516 copylist.append((tfn(pat, dest, srcs), srcs))
517 if not copylist:
517 if not copylist:
518 raise util.Abort(_('no files to copy'))
518 raise util.Abort(_('no files to copy'))
519
519
520 errors = 0
520 errors = 0
521 for targetpath, srcs in copylist:
521 for targetpath, srcs in copylist:
522 for abssrc, relsrc, exact in srcs:
522 for abssrc, relsrc, exact in srcs:
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
523 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
524 errors += 1
524 errors += 1
525
525
526 if errors:
526 if errors:
527 ui.warn(_('(consider using --after)\n'))
527 ui.warn(_('(consider using --after)\n'))
528
528
529 return errors
529 return errors
530
530
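# Illustrative sketch (not part of cmdutil.py): copy() above implements both
# 'hg copy' and 'hg rename'; the last pattern is the destination, rename=True
# removes the sources afterwards, and the return value is the number of files
# that could not be copied. The helper name _demo_rename is hypothetical and
# the opts keys mirror the flags of the copy/rename commands.
def _demo_rename(ui, repo, src, dest):
    opts = {'after': False, 'dry_run': False, 'force': False,
            'include': [], 'exclude': []}
    return copy(ui, repo, [src, dest], opts, rename=True)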
531 def service(opts, parentfn=None, initfn=None, runfn=None):
531 def service(opts, parentfn=None, initfn=None, runfn=None):
532 '''Run a command as a service.'''
532 '''Run a command as a service.'''
533
533
534 if opts['daemon'] and not opts['daemon_pipefds']:
534 if opts['daemon'] and not opts['daemon_pipefds']:
535 rfd, wfd = os.pipe()
535 rfd, wfd = os.pipe()
536 args = sys.argv[:]
536 args = sys.argv[:]
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
538 # Don't pass --cwd to the child process, because we've already
538 # Don't pass --cwd to the child process, because we've already
539 # changed directory.
539 # changed directory.
540 for i in xrange(1,len(args)):
540 for i in xrange(1,len(args)):
541 if args[i].startswith('--cwd='):
541 if args[i].startswith('--cwd='):
542 del args[i]
542 del args[i]
543 break
543 break
544 elif args[i].startswith('--cwd'):
544 elif args[i].startswith('--cwd'):
545 del args[i:i+2]
545 del args[i:i+2]
546 break
546 break
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
547 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
548 args[0], args)
548 args[0], args)
549 os.close(wfd)
549 os.close(wfd)
550 os.read(rfd, 1)
550 os.read(rfd, 1)
551 if parentfn:
551 if parentfn:
552 return parentfn(pid)
552 return parentfn(pid)
553 else:
553 else:
554 os._exit(0)
554 os._exit(0)
555
555
556 if initfn:
556 if initfn:
557 initfn()
557 initfn()
558
558
559 if opts['pid_file']:
559 if opts['pid_file']:
560 fp = open(opts['pid_file'], 'w')
560 fp = open(opts['pid_file'], 'w')
561 fp.write(str(os.getpid()) + '\n')
561 fp.write(str(os.getpid()) + '\n')
562 fp.close()
562 fp.close()
563
563
564 if opts['daemon_pipefds']:
564 if opts['daemon_pipefds']:
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
566 os.close(rfd)
566 os.close(rfd)
567 try:
567 try:
568 os.setsid()
568 os.setsid()
569 except AttributeError:
569 except AttributeError:
570 pass
570 pass
571 os.write(wfd, 'y')
571 os.write(wfd, 'y')
572 os.close(wfd)
572 os.close(wfd)
573 sys.stdout.flush()
573 sys.stdout.flush()
574 sys.stderr.flush()
574 sys.stderr.flush()
575 fd = os.open(util.nulldev, os.O_RDWR)
575 fd = os.open(util.nulldev, os.O_RDWR)
576 if fd != 0: os.dup2(fd, 0)
576 if fd != 0: os.dup2(fd, 0)
577 if fd != 1: os.dup2(fd, 1)
577 if fd != 1: os.dup2(fd, 1)
578 if fd != 2: os.dup2(fd, 2)
578 if fd != 2: os.dup2(fd, 2)
579 if fd not in (0, 1, 2): os.close(fd)
579 if fd not in (0, 1, 2): os.close(fd)
580
580
581 if runfn:
581 if runfn:
582 return runfn()
582 return runfn()
583
583
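# Illustrative sketch (not part of cmdutil.py): service() above daemonizes in
# two passes: the parent re-spawns itself with --daemon-pipefds and blocks on
# the read end of a pipe, while the child calls setsid(), writes a single 'y'
# byte to signal readiness and redirects stdio to the null device before
# running runfn(). The helper name _demo_service is hypothetical and mimics
# how 'hg serve -d' drives this function.
def _demo_service(run, daemonize=False):
    # service() indexes these option keys directly, so all three must exist
    opts = {'daemon': daemonize, 'daemon_pipefds': '', 'pid_file': ''}
    return service(opts, runfn=run)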
584 class changeset_printer(object):
584 class changeset_printer(object):
585 '''show changeset information when templating not requested.'''
585 '''show changeset information when templating not requested.'''
586
586
587 def __init__(self, ui, repo, patch, diffopts, buffered):
587 def __init__(self, ui, repo, patch, diffopts, buffered):
588 self.ui = ui
588 self.ui = ui
589 self.repo = repo
589 self.repo = repo
590 self.buffered = buffered
590 self.buffered = buffered
591 self.patch = patch
591 self.patch = patch
592 self.diffopts = diffopts
592 self.diffopts = diffopts
593 self.header = {}
593 self.header = {}
594 self.hunk = {}
594 self.hunk = {}
595 self.lastheader = None
595 self.lastheader = None
596
596
597 def flush(self, rev):
597 def flush(self, rev):
598 if rev in self.header:
598 if rev in self.header:
599 h = self.header[rev]
599 h = self.header[rev]
600 if h != self.lastheader:
600 if h != self.lastheader:
601 self.lastheader = h
601 self.lastheader = h
602 self.ui.write(h)
602 self.ui.write(h)
603 del self.header[rev]
603 del self.header[rev]
604 if rev in self.hunk:
604 if rev in self.hunk:
605 self.ui.write(self.hunk[rev])
605 self.ui.write(self.hunk[rev])
606 del self.hunk[rev]
606 del self.hunk[rev]
607 return 1
607 return 1
608 return 0
608 return 0
609
609
610 def show(self, ctx, copies=(), **props):
610 def show(self, ctx, copies=(), **props):
611 if self.buffered:
611 if self.buffered:
612 self.ui.pushbuffer()
612 self.ui.pushbuffer()
613 self._show(ctx, copies, props)
613 self._show(ctx, copies, props)
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
614 self.hunk[ctx.rev()] = self.ui.popbuffer()
615 else:
615 else:
616 self._show(ctx, copies, props)
616 self._show(ctx, copies, props)
617
617
618 def _show(self, ctx, copies, props):
618 def _show(self, ctx, copies, props):
619 '''show a single changeset or file revision'''
619 '''show a single changeset or file revision'''
620 changenode = ctx.node()
620 changenode = ctx.node()
621 rev = ctx.rev()
621 rev = ctx.rev()
622
622
623 if self.ui.quiet:
623 if self.ui.quiet:
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
624 self.ui.write("%d:%s\n" % (rev, short(changenode)))
625 return
625 return
626
626
627 log = self.repo.changelog
627 log = self.repo.changelog
628 changes = log.read(changenode)
628 changes = log.read(changenode)
629 date = util.datestr(changes[2])
629 date = util.datestr(changes[2])
630 extra = changes[5]
630 extra = changes[5]
631 branch = extra.get("branch")
631 branch = extra.get("branch")
632
632
633 hexfunc = self.ui.debugflag and hex or short
633 hexfunc = self.ui.debugflag and hex or short
634
634
635 parents = [(p, hexfunc(log.node(p)))
635 parents = [(p, hexfunc(log.node(p)))
636 for p in self._meaningful_parentrevs(log, rev)]
636 for p in self._meaningful_parentrevs(log, rev)]
637
637
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
638 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
639
639
640 # don't show the default branch name
640 # don't show the default branch name
641 if branch != 'default':
641 if branch != 'default':
642 branch = encoding.tolocal(branch)
642 branch = encoding.tolocal(branch)
643 self.ui.write(_("branch: %s\n") % branch)
643 self.ui.write(_("branch: %s\n") % branch)
644 for tag in self.repo.nodetags(changenode):
644 for tag in self.repo.nodetags(changenode):
645 self.ui.write(_("tag: %s\n") % tag)
645 self.ui.write(_("tag: %s\n") % tag)
646 for parent in parents:
646 for parent in parents:
647 self.ui.write(_("parent: %d:%s\n") % parent)
647 self.ui.write(_("parent: %d:%s\n") % parent)
648
648
649 if self.ui.debugflag:
649 if self.ui.debugflag:
650 self.ui.write(_("manifest: %d:%s\n") %
650 self.ui.write(_("manifest: %d:%s\n") %
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
651 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
652 self.ui.write(_("user: %s\n") % changes[1])
652 self.ui.write(_("user: %s\n") % changes[1])
653 self.ui.write(_("date: %s\n") % date)
653 self.ui.write(_("date: %s\n") % date)
654
654
655 if self.ui.debugflag:
655 if self.ui.debugflag:
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
656 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
657 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
658 files):
658 files):
659 if value:
659 if value:
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
660 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
661 elif changes[3] and self.ui.verbose:
661 elif changes[3] and self.ui.verbose:
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
662 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
663 if copies and self.ui.verbose:
663 if copies and self.ui.verbose:
664 copies = ['%s (%s)' % c for c in copies]
664 copies = ['%s (%s)' % c for c in copies]
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
665 self.ui.write(_("copies: %s\n") % ' '.join(copies))
666
666
667 if extra and self.ui.debugflag:
667 if extra and self.ui.debugflag:
668 for key, value in sorted(extra.items()):
668 for key, value in sorted(extra.items()):
669 self.ui.write(_("extra: %s=%s\n")
669 self.ui.write(_("extra: %s=%s\n")
670 % (key, value.encode('string_escape')))
670 % (key, value.encode('string_escape')))
671
671
672 description = changes[4].strip()
672 description = changes[4].strip()
673 if description:
673 if description:
674 if self.ui.verbose:
674 if self.ui.verbose:
675 self.ui.write(_("description:\n"))
675 self.ui.write(_("description:\n"))
676 self.ui.write(description)
676 self.ui.write(description)
677 self.ui.write("\n\n")
677 self.ui.write("\n\n")
678 else:
678 else:
679 self.ui.write(_("summary: %s\n") %
679 self.ui.write(_("summary: %s\n") %
680 description.splitlines()[0])
680 description.splitlines()[0])
681 self.ui.write("\n")
681 self.ui.write("\n")
682
682
683 self.showpatch(changenode)
683 self.showpatch(changenode)
684
684
685 def showpatch(self, node):
685 def showpatch(self, node):
686 if self.patch:
686 if self.patch:
687 prev = self.repo.changelog.parents(node)[0]
687 prev = self.repo.changelog.parents(node)[0]
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
688 chunks = patch.diff(self.repo, prev, node, match=self.patch,
689 opts=patch.diffopts(self.ui, self.diffopts))
689 opts=patch.diffopts(self.ui, self.diffopts))
690 for chunk in chunks:
690 for chunk in chunks:
691 self.ui.write(chunk)
691 self.ui.write(chunk)
692 self.ui.write("\n")
692 self.ui.write("\n")
693
693
694 def _meaningful_parentrevs(self, log, rev):
694 def _meaningful_parentrevs(self, log, rev):
695 """Return list of meaningful (or all if debug) parentrevs for rev.
695 """Return list of meaningful (or all if debug) parentrevs for rev.
696
696
697 For merges (two non-nullrev revisions) both parents are meaningful.
697 For merges (two non-nullrev revisions) both parents are meaningful.
698 Otherwise the first parent revision is considered meaningful if it
698 Otherwise the first parent revision is considered meaningful if it
699 is not the preceding revision.
699 is not the preceding revision.
700 """
700 """
701 parents = log.parentrevs(rev)
701 parents = log.parentrevs(rev)
702 if not self.ui.debugflag and parents[1] == nullrev:
702 if not self.ui.debugflag and parents[1] == nullrev:
703 if parents[0] >= rev - 1:
703 if parents[0] >= rev - 1:
704 parents = []
704 parents = []
705 else:
705 else:
706 parents = [parents[0]]
706 parents = [parents[0]]
707 return parents
707 return parents
708
708
709
709
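# Illustrative sketch (not part of cmdutil.py): changeset_printer above either
# writes each changeset immediately or, when buffered, stores the rendered
# text per revision so the caller can emit it later in a different order via
# flush(). The helper name _demo_display is hypothetical.
def _demo_display(ui, repo, revs):
    displayer = changeset_printer(ui, repo, patch=False, diffopts={},
                                  buffered=True)
    for rev in revs:
        displayer.show(repo[rev])
    for rev in reversed(revs):          # emit in reverse order
        displayer.flush(rev)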
710 class changeset_templater(changeset_printer):
710 class changeset_templater(changeset_printer):
711 '''format changeset information.'''
711 '''format changeset information.'''
712
712
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
713 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
714 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
715 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
715 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
716 self.t = templater.templater(mapfile, {'formatnode': formatnode},
716 self.t = templater.templater(mapfile, {'formatnode': formatnode},
717 cache={
717 cache={
718 'parent': '{rev}:{node|formatnode} ',
718 'parent': '{rev}:{node|formatnode} ',
719 'manifest': '{rev}:{node|formatnode}',
719 'manifest': '{rev}:{node|formatnode}',
720 'filecopy': '{name} ({source})'})
720 'filecopy': '{name} ({source})'})
721
721
722 def use_template(self, t):
722 def use_template(self, t):
723 '''set template string to use'''
723 '''set template string to use'''
724 self.t.cache['changeset'] = t
724 self.t.cache['changeset'] = t
725
725
726 def _meaningful_parentrevs(self, ctx):
726 def _meaningful_parentrevs(self, ctx):
727 """Return list of meaningful (or all if debug) parentrevs for rev.
727 """Return list of meaningful (or all if debug) parentrevs for rev.
728 """
728 """
729 parents = ctx.parents()
729 parents = ctx.parents()
730 if len(parents) > 1:
730 if len(parents) > 1:
731 return parents
731 return parents
732 if self.ui.debugflag:
732 if self.ui.debugflag:
733 return [parents[0], self.repo['null']]
733 return [parents[0], self.repo['null']]
734 if parents[0].rev() >= ctx.rev() - 1:
734 if parents[0].rev() >= ctx.rev() - 1:
735 return []
735 return []
736 return parents
736 return parents
737
737
738 def _show(self, ctx, copies, props):
738 def _show(self, ctx, copies, props):
739 '''show a single changeset or file revision'''
739 '''show a single changeset or file revision'''
740
740
741 def showlist(name, values, plural=None, **args):
741 def showlist(name, values, plural=None, **args):
742 '''expand set of values.
742 '''expand set of values.
743 name is name of key in template map.
743 name is name of key in template map.
744 values is list of strings or dicts.
744 values is list of strings or dicts.
745 plural is plural of name, if not simply name + 's'.
745 plural is plural of name, if not simply name + 's'.
746
746
747 expansion works like this, given name 'foo'.
747 expansion works like this, given name 'foo'.
748
748
749 if values is empty, expand 'no_foos'.
749 if values is empty, expand 'no_foos'.
750
750
751 if 'foo' not in template map, return values as a string,
751 if 'foo' not in template map, return values as a string,
752 joined by space.
752 joined by space.
753
753
754 expand 'start_foos'.
754 expand 'start_foos'.
755
755
756 for each value, expand 'foo'. if 'last_foo' in template
756 for each value, expand 'foo'. if 'last_foo' in template
757 map, expand it instead of 'foo' for last key.
757 map, expand it instead of 'foo' for last key.
758
758
759 expand 'end_foos'.
759 expand 'end_foos'.
760 '''
760 '''
761 if plural: names = plural
761 if plural: names = plural
762 else: names = name + 's'
762 else: names = name + 's'
763 if not values:
763 if not values:
764 noname = 'no_' + names
764 noname = 'no_' + names
765 if noname in self.t:
765 if noname in self.t:
766 yield self.t(noname, **args)
766 yield self.t(noname, **args)
767 return
767 return
768 if name not in self.t:
768 if name not in self.t:
769 if isinstance(values[0], str):
769 if isinstance(values[0], str):
770 yield ' '.join(values)
770 yield ' '.join(values)
771 else:
771 else:
772 for v in values:
772 for v in values:
773 yield dict(v, **args)
773 yield dict(v, **args)
774 return
774 return
775 startname = 'start_' + names
775 startname = 'start_' + names
776 if startname in self.t:
776 if startname in self.t:
777 yield self.t(startname, **args)
777 yield self.t(startname, **args)
778 vargs = args.copy()
778 vargs = args.copy()
779 def one(v, tag=name):
779 def one(v, tag=name):
780 try:
780 try:
781 vargs.update(v)
781 vargs.update(v)
782 except (AttributeError, ValueError):
782 except (AttributeError, ValueError):
783 try:
783 try:
784 for a, b in v:
784 for a, b in v:
785 vargs[a] = b
785 vargs[a] = b
786 except ValueError:
786 except ValueError:
787 vargs[name] = v
787 vargs[name] = v
788 return self.t(tag, **vargs)
788 return self.t(tag, **vargs)
789 lastname = 'last_' + name
789 lastname = 'last_' + name
790 if lastname in self.t:
790 if lastname in self.t:
791 last = values.pop()
791 last = values.pop()
792 else:
792 else:
793 last = None
793 last = None
794 for v in values:
794 for v in values:
795 yield one(v)
795 yield one(v)
796 if last is not None:
796 if last is not None:
797 yield one(last, tag=lastname)
797 yield one(last, tag=lastname)
798 endname = 'end_' + names
798 endname = 'end_' + names
799 if endname in self.t:
799 if endname in self.t:
800 yield self.t(endname, **args)
800 yield self.t(endname, **args)
801
801
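# Illustrative note (not part of cmdutil.py): given a template map containing,
# say,
#     start_files = 'files: '
#     file = '{file} '
#     last_file = '{file}\n'
# showlist('file', [...]) above yields the start_files text, then one 'file'
# expansion per entry with the final entry rendered through 'last_file';
# 'no_files' would be expanded instead for an empty list.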
802 def showbranches(**args):
802 def showbranches(**args):
803 branch = ctx.branch()
803 branch = ctx.branch()
804 if branch != 'default':
804 if branch != 'default':
805 branch = encoding.tolocal(branch)
805 branch = encoding.tolocal(branch)
806 return showlist('branch', [branch], plural='branches', **args)
806 return showlist('branch', [branch], plural='branches', **args)
807
807
808 def showparents(**args):
808 def showparents(**args):
809 parents = [[('rev', p.rev()), ('node', p.hex())]
809 parents = [[('rev', p.rev()), ('node', p.hex())]
810 for p in self._meaningful_parentrevs(ctx)]
810 for p in self._meaningful_parentrevs(ctx)]
811 return showlist('parent', parents, **args)
811 return showlist('parent', parents, **args)
812
812
813 def showtags(**args):
813 def showtags(**args):
814 return showlist('tag', ctx.tags(), **args)
814 return showlist('tag', ctx.tags(), **args)
815
815
816 def showextras(**args):
816 def showextras(**args):
817 for key, value in sorted(ctx.extra().items()):
817 for key, value in sorted(ctx.extra().items()):
818 args = args.copy()
818 args = args.copy()
819 args.update(dict(key=key, value=value))
819 args.update(dict(key=key, value=value))
820 yield self.t('extra', **args)
820 yield self.t('extra', **args)
821
821
822 def showcopies(**args):
822 def showcopies(**args):
823 c = [{'name': x[0], 'source': x[1]} for x in copies]
823 c = [{'name': x[0], 'source': x[1]} for x in copies]
824 return showlist('file_copy', c, plural='file_copies', **args)
824 return showlist('file_copy', c, plural='file_copies', **args)
825
825
826 files = []
826 files = []
827 def getfiles():
827 def getfiles():
828 if not files:
828 if not files:
829 files[:] = self.repo.status(ctx.parents()[0].node(),
829 files[:] = self.repo.status(ctx.parents()[0].node(),
830 ctx.node())[:3]
830 ctx.node())[:3]
831 return files
831 return files
832 def showfiles(**args):
832 def showfiles(**args):
833 return showlist('file', ctx.files(), **args)
833 return showlist('file', ctx.files(), **args)
834 def showmods(**args):
834 def showmods(**args):
835 return showlist('file_mod', getfiles()[0], **args)
835 return showlist('file_mod', getfiles()[0], **args)
836 def showadds(**args):
836 def showadds(**args):
837 return showlist('file_add', getfiles()[1], **args)
837 return showlist('file_add', getfiles()[1], **args)
838 def showdels(**args):
838 def showdels(**args):
839 return showlist('file_del', getfiles()[2], **args)
839 return showlist('file_del', getfiles()[2], **args)
840 def showmanifest(**args):
840 def showmanifest(**args):
841 args = args.copy()
841 args = args.copy()
842 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
842 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
843 node=hex(ctx.changeset()[0])))
843 node=hex(ctx.changeset()[0])))
844 return self.t('manifest', **args)
844 return self.t('manifest', **args)
845
845
846 def showdiffstat(**args):
846 def showdiffstat(**args):
847 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
847 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
848 files, adds, removes = 0, 0, 0
848 files, adds, removes = 0, 0, 0
849 for i in patch.diffstatdata(util.iterlines(diff)):
849 for i in patch.diffstatdata(util.iterlines(diff)):
850 files += 1
850 files += 1
851 adds += i[1]
851 adds += i[1]
852 removes += i[2]
852 removes += i[2]
853 return '%s: +%s/-%s' % (files, adds, removes)
853 return '%s: +%s/-%s' % (files, adds, removes)
854
854
855 defprops = {
855 defprops = {
856 'author': ctx.user(),
856 'author': ctx.user(),
857 'branches': showbranches,
857 'branches': showbranches,
858 'date': ctx.date(),
858 'date': ctx.date(),
859 'desc': ctx.description().strip(),
859 'desc': ctx.description().strip(),
860 'file_adds': showadds,
860 'file_adds': showadds,
861 'file_dels': showdels,
861 'file_dels': showdels,
862 'file_mods': showmods,
862 'file_mods': showmods,
863 'files': showfiles,
863 'files': showfiles,
864 'file_copies': showcopies,
864 'file_copies': showcopies,
865 'manifest': showmanifest,
865 'manifest': showmanifest,
866 'node': ctx.hex(),
866 'node': ctx.hex(),
867 'parents': showparents,
867 'parents': showparents,
868 'rev': ctx.rev(),
868 'rev': ctx.rev(),
869 'tags': showtags,
869 'tags': showtags,
870 'extras': showextras,
870 'extras': showextras,
871 'diffstat': showdiffstat,
871 'diffstat': showdiffstat,
872 }
872 }
873 props = props.copy()
873 props = props.copy()
874 props.update(defprops)
874 props.update(defprops)
875
875
876 # find correct templates for current mode
876 # find correct templates for current mode
877
877
878 tmplmodes = [
878 tmplmodes = [
879 (True, None),
879 (True, None),
880 (self.ui.verbose, 'verbose'),
880 (self.ui.verbose, 'verbose'),
881 (self.ui.quiet, 'quiet'),
881 (self.ui.quiet, 'quiet'),
882 (self.ui.debugflag, 'debug'),
882 (self.ui.debugflag, 'debug'),
883 ]
883 ]
884
884
885 types = {'header': '', 'changeset': 'changeset'}
885 types = {'header': '', 'changeset': 'changeset'}
886 for mode, postfix in tmplmodes:
886 for mode, postfix in tmplmodes:
887 for type in types:
887 for type in types:
888 cur = postfix and ('%s_%s' % (type, postfix)) or type
888 cur = postfix and ('%s_%s' % (type, postfix)) or type
889 if mode and cur in self.t:
889 if mode and cur in self.t:
890 types[type] = cur
890 types[type] = cur
891
891
892 try:
892 try:
893
893
894 # write header
894 # write header
895 if types['header']:
895 if types['header']:
896 h = templater.stringify(self.t(types['header'], **props))
896 h = templater.stringify(self.t(types['header'], **props))
897 if self.buffered:
897 if self.buffered:
898 self.header[ctx.rev()] = h
898 self.header[ctx.rev()] = h
899 else:
899 else:
900 self.ui.write(h)
900 self.ui.write(h)
901
901
902 # write changeset metadata, then patch if requested
902 # write changeset metadata, then patch if requested
903 key = types['changeset']
903 key = types['changeset']
904 self.ui.write(templater.stringify(self.t(key, **props)))
904 self.ui.write(templater.stringify(self.t(key, **props)))
905 self.showpatch(ctx.node())
905 self.showpatch(ctx.node())
906
906
907 except KeyError, inst:
907 except KeyError, inst:
908 msg = _("%s: no key named '%s'")
908 msg = _("%s: no key named '%s'")
909 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
909 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
910 except SyntaxError, inst:
910 except SyntaxError, inst:
911 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
911 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
912
912
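# Illustrative sketch (not part of cmdutil.py): changeset_templater above
# renders changesets through a template map, and use_template() installs an
# explicit 'changeset' template string (show_changeset() below does exactly
# this for --template). The helper name _demo_templated and the template
# string are only examples.
def _demo_templated(ui, repo, rev):
    t = changeset_templater(ui, repo, patch=False, diffopts={},
                            mapfile=None, buffered=False)
    t.use_template('{rev}:{node|short} {desc|firstline}\n')
    t.show(repo[rev])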
913 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
913 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
914 """show one changeset using template or regular display.
914 """show one changeset using template or regular display.
915
915
916 Display format will be the first non-empty hit of:
916 Display format will be the first non-empty hit of:
917 1. option 'template'
917 1. option 'template'
918 2. option 'style'
918 2. option 'style'
919 3. [ui] setting 'logtemplate'
919 3. [ui] setting 'logtemplate'
920 4. [ui] setting 'style'
920 4. [ui] setting 'style'
921 If all of these values are either unset or the empty string,
921 If all of these values are either unset or the empty string,
922 regular display via changeset_printer() is done.
922 regular display via changeset_printer() is done.
923 """
923 """
924 # options
924 # options
925 patch = False
925 patch = False
926 if opts.get('patch'):
926 if opts.get('patch'):
927 patch = matchfn or matchall(repo)
927 patch = matchfn or matchall(repo)
928
928
929 tmpl = opts.get('template')
929 tmpl = opts.get('template')
930 style = None
930 style = None
931 if tmpl:
931 if tmpl:
932 tmpl = templater.parsestring(tmpl, quoted=False)
932 tmpl = templater.parsestring(tmpl, quoted=False)
933 else:
933 else:
934 style = opts.get('style')
934 style = opts.get('style')
935
935
936 # ui settings
936 # ui settings
937 if not (tmpl or style):
937 if not (tmpl or style):
938 tmpl = ui.config('ui', 'logtemplate')
938 tmpl = ui.config('ui', 'logtemplate')
939 if tmpl:
939 if tmpl:
940 tmpl = templater.parsestring(tmpl)
940 tmpl = templater.parsestring(tmpl)
941 else:
941 else:
942 style = ui.config('ui', 'style')
942 style = ui.config('ui', 'style')
943
943
944 if not (tmpl or style):
944 if not (tmpl or style):
945 return changeset_printer(ui, repo, patch, opts, buffered)
945 return changeset_printer(ui, repo, patch, opts, buffered)
946
946
947 mapfile = None
947 mapfile = None
948 if style and not tmpl:
948 if style and not tmpl:
949 mapfile = style
949 mapfile = style
950 if not os.path.split(mapfile)[0]:
950 if not os.path.split(mapfile)[0]:
951 mapname = (templater.templatepath('map-cmdline.' + mapfile)
951 mapname = (templater.templatepath('map-cmdline.' + mapfile)
952 or templater.templatepath(mapfile))
952 or templater.templatepath(mapfile))
953 if mapname: mapfile = mapname
953 if mapname: mapfile = mapname
954
954
955 try:
955 try:
956 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
956 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
957 except SyntaxError, inst:
957 except SyntaxError, inst:
958 raise util.Abort(inst.args[0])
958 raise util.Abort(inst.args[0])
959 if tmpl: t.use_template(tmpl)
959 if tmpl: t.use_template(tmpl)
960 return t
960 return t
961
961
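# Illustrative sketch (not part of cmdutil.py): show_changeset() above picks a
# displayer for log-like commands, preferring --template, then --style, then
# the [ui] logtemplate/style settings, and falling back to the plain
# changeset_printer. The helper name _demo_log is hypothetical.
def _demo_log(ui, repo, revs, opts):
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        displayer.show(repo[rev])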
962 def finddate(ui, repo, date):
962 def finddate(ui, repo, date):
963 """Find the tipmost changeset that matches the given date spec"""
963 """Find the tipmost changeset that matches the given date spec"""
964 df = util.matchdate(date)
964 df = util.matchdate(date)
965 get = util.cachefunc(lambda r: repo[r].changeset())
965 get = util.cachefunc(lambda r: repo[r].changeset())
966 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
966 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
967 results = {}
967 results = {}
968 for st, rev, fns in changeiter:
968 for st, rev, fns in changeiter:
969 if st == 'add':
969 if st == 'add':
970 d = get(rev)[2]
970 d = get(rev)[2]
971 if df(d[0]):
971 if df(d[0]):
972 results[rev] = d
972 results[rev] = d
973 elif st == 'iter':
973 elif st == 'iter':
974 if rev in results:
974 if rev in results:
975 ui.status(_("Found revision %s from %s\n") %
975 ui.status(_("Found revision %s from %s\n") %
976 (rev, util.datestr(results[rev])))
976 (rev, util.datestr(results[rev])))
977 return str(rev)
977 return str(rev)
978
978
979 raise util.Abort(_("revision matching date not found"))
979 raise util.Abort(_("revision matching date not found"))
980
980
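# Illustrative note (not part of cmdutil.py): finddate() above resolves a date
# specification such as '>2009-01-01' or 'may 2008' to the newest matching
# revision and aborts when nothing matches; 'hg update --date' is a typical
# caller. The call below is only an example:
#
#     rev = finddate(ui, repo, '2009-04-01 to 2009-04-30')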
981 def walkchangerevs(ui, repo, pats, change, opts):
981 def walkchangerevs(ui, repo, pats, change, opts):
982 '''Iterate over files and the revs in which they changed.
982 '''Iterate over files and the revs in which they changed.
983
983
984 Callers most commonly need to iterate backwards over the history
984 Callers most commonly need to iterate backwards over the history
985 in which they are interested. Doing so has awful (quadratic-looking)
985 in which they are interested. Doing so has awful (quadratic-looking)
986 performance, so we use iterators in a "windowed" way.
986 performance, so we use iterators in a "windowed" way.
987
987
988 We walk a window of revisions in the desired order. Within the
988 We walk a window of revisions in the desired order. Within the
989 window, we first walk forwards to gather data, then in the desired
989 window, we first walk forwards to gather data, then in the desired
990 order (usually backwards) to display it.
990 order (usually backwards) to display it.
991
991
992 This function returns an (iterator, matchfn) tuple. The iterator
992 This function returns an (iterator, matchfn) tuple. The iterator
993 yields 3-tuples. They will be of one of the following forms:
993 yields 3-tuples. They will be of one of the following forms:
994
994
995 "window", incrementing, lastrev: stepping through a window,
995 "window", incrementing, lastrev: stepping through a window,
996 positive if walking forwards through revs, last rev in the
996 positive if walking forwards through revs, last rev in the
997 sequence iterated over - use to reset state for the current window
997 sequence iterated over - use to reset state for the current window
998
998
999 "add", rev, fns: out-of-order traversal of the given file names
999 "add", rev, fns: out-of-order traversal of the given file names
1000 fns, which changed during revision rev - use to gather data for
1000 fns, which changed during revision rev - use to gather data for
1001 possible display
1001 possible display
1002
1002
1003 "iter", rev, None: in-order traversal of the revs earlier iterated
1003 "iter", rev, None: in-order traversal of the revs earlier iterated
1004 over with "add" - use to display data'''
1004 over with "add" - use to display data'''
1005
1005
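# Illustrative note (not part of cmdutil.py): a caller typically drives the
# returned iterator like finddate() above does:
#
#     changeiter, matchfn = walkchangerevs(ui, repo, pats, change, opts)
#     for st, rev, fns in changeiter:
#         if st == 'add':
#             pass                    # gather data for rev (out of order)
#         elif st == 'iter':
#             pass                    # display rev (in the requested order)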
1006 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1006 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1007 if start < end:
1007 if start < end:
1008 while start < end:
1008 while start < end:
1009 yield start, min(windowsize, end-start)
1009 yield start, min(windowsize, end-start)
1010 start += windowsize
1010 start += windowsize
1011 if windowsize < sizelimit:
1011 if windowsize < sizelimit:
1012 windowsize *= 2
1012 windowsize *= 2
1013 else:
1013 else:
1014 while start > end:
1014 while start > end:
1015 yield start, min(windowsize, start-end-1)
1015 yield start, min(windowsize, start-end-1)
1016 start -= windowsize
1016 start -= windowsize
1017 if windowsize < sizelimit:
1017 if windowsize < sizelimit:
1018 windowsize *= 2
1018 windowsize *= 2
1019
1019
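# Illustrative note (not part of cmdutil.py): increasing_windows() yields
# (start, size) pairs whose sizes double from 8 up to the 512 cap, walking
# forwards when start < end and backwards otherwise; for example
# increasing_windows(0, 100) yields (0, 8), (8, 16), (24, 32), (56, 44).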
1020 m = match(repo, pats, opts)
1020 m = match(repo, pats, opts)
1021 follow = opts.get('follow') or opts.get('follow_first')
1021 follow = opts.get('follow') or opts.get('follow_first')
1022
1022
1023 if not len(repo):
1023 if not len(repo):
1024 return [], m
1024 return [], m
1025
1025
1026 if follow:
1026 if follow:
1027 defrange = '%s:0' % repo['.'].rev()
1027 defrange = '%s:0' % repo['.'].rev()
1028 else:
1028 else:
1029 defrange = '-1:0'
1029 defrange = '-1:0'
1030 revs = revrange(repo, opts['rev'] or [defrange])
1030 revs = revrange(repo, opts['rev'] or [defrange])
1031 wanted = set()
1031 wanted = set()
1032 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1032 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1033 fncache = {}
1033 fncache = {}
1034
1034
1035 if not slowpath and not m.files():
1035 if not slowpath and not m.files():
1036 # No files, no patterns. Display all revs.
1036 # No files, no patterns. Display all revs.
1037 wanted = set(revs)
1037 wanted = set(revs)
1038 copies = []
1038 copies = []
1039 if not slowpath:
1039 if not slowpath:
1040 # Only files, no patterns. Check the history of each file.
1040 # Only files, no patterns. Check the history of each file.
1041 def filerevgen(filelog, node):
1041 def filerevgen(filelog, node):
1042 cl_count = len(repo)
1042 cl_count = len(repo)
1043 if node is None:
1043 if node is None:
1044 last = len(filelog) - 1
1044 last = len(filelog) - 1
1045 else:
1045 else:
1046 last = filelog.rev(node)
1046 last = filelog.rev(node)
1047 for i, window in increasing_windows(last, nullrev):
1047 for i, window in increasing_windows(last, nullrev):
1048 revs = []
1048 revs = []
1049 for j in xrange(i - window, i + 1):
1049 for j in xrange(i - window, i + 1):
1050 n = filelog.node(j)
1050 n = filelog.node(j)
1051 revs.append((filelog.linkrev(j),
1051 revs.append((filelog.linkrev(j),
1052 follow and filelog.renamed(n)))
1052 follow and filelog.renamed(n)))
1053 for rev in reversed(revs):
1053 for rev in reversed(revs):
1054 # only yield revs whose changelog entry already exists; missing entries
1054 # only yield revs whose changelog entry already exists; missing entries
1055 # can happen while doing "hg log" during a pull or commit
1055 # can happen while doing "hg log" during a pull or commit
1056 if rev[0] < cl_count:
1056 if rev[0] < cl_count:
1057 yield rev
1057 yield rev
1058 def iterfiles():
1058 def iterfiles():
1059 for filename in m.files():
1059 for filename in m.files():
1060 yield filename, None
1060 yield filename, None
1061 for filename_node in copies:
1061 for filename_node in copies:
1062 yield filename_node
1062 yield filename_node
1063 minrev, maxrev = min(revs), max(revs)
1063 minrev, maxrev = min(revs), max(revs)
1064 for file_, node in iterfiles():
1064 for file_, node in iterfiles():
1065 filelog = repo.file(file_)
1065 filelog = repo.file(file_)
1066 if not len(filelog):
1066 if not len(filelog):
1067 if node is None:
1067 if node is None:
1068 # A zero count may be a directory or deleted file, so
1068 # A zero count may be a directory or deleted file, so
1069 # try to find matching entries on the slow path.
1069 # try to find matching entries on the slow path.
1070 if follow:
1070 if follow:
1071 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1071 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1072 slowpath = True
1072 slowpath = True
1073 break
1073 break
1074 else:
1074 else:
1075 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1075 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1076 % (file_, short(node)))
1076 % (file_, short(node)))
1077 continue
1077 continue
1078 for rev, copied in filerevgen(filelog, node):
1078 for rev, copied in filerevgen(filelog, node):
1079 if rev <= maxrev:
1079 if rev <= maxrev:
1080 if rev < minrev:
1080 if rev < minrev:
1081 break
1081 break
1082 fncache.setdefault(rev, [])
1082 fncache.setdefault(rev, [])
1083 fncache[rev].append(file_)
1083 fncache[rev].append(file_)
1084 wanted.add(rev)
1084 wanted.add(rev)
1085 if follow and copied:
1085 if follow and copied:
1086 copies.append(copied)
1086 copies.append(copied)
1087 if slowpath:
1087 if slowpath:
1088 if follow:
1088 if follow:
1089 raise util.Abort(_('can only follow copies/renames for explicit '
1089 raise util.Abort(_('can only follow copies/renames for explicit '
1090 'file names'))
1090 'file names'))
1091
1091
1092 # The slow path checks files modified in every changeset.
1092 # The slow path checks files modified in every changeset.
1093 def changerevgen():
1093 def changerevgen():
1094 for i, window in increasing_windows(len(repo) - 1, nullrev):
1094 for i, window in increasing_windows(len(repo) - 1, nullrev):
1095 for j in xrange(i - window, i + 1):
1095 for j in xrange(i - window, i + 1):
1096 yield j, change(j)[3]
1096 yield j, change(j)[3]
1097
1097
1098 for rev, changefiles in changerevgen():
1098 for rev, changefiles in changerevgen():
1099 matches = filter(m, changefiles)
1099 matches = filter(m, changefiles)
1100 if matches:
1100 if matches:
1101 fncache[rev] = matches
1101 fncache[rev] = matches
1102 wanted.add(rev)
1102 wanted.add(rev)
1103
1103
1104 class followfilter:
1104 class followfilter:
1105 def __init__(self, onlyfirst=False):
1105 def __init__(self, onlyfirst=False):
1106 self.startrev = nullrev
1106 self.startrev = nullrev
1107 self.roots = []
1107 self.roots = []
1108 self.onlyfirst = onlyfirst
1108 self.onlyfirst = onlyfirst
1109
1109
1110 def match(self, rev):
1110 def match(self, rev):
1111 def realparents(rev):
1111 def realparents(rev):
1112 if self.onlyfirst:
1112 if self.onlyfirst:
1113 return repo.changelog.parentrevs(rev)[0:1]
1113 return repo.changelog.parentrevs(rev)[0:1]
1114 else:
1114 else:
1115 return filter(lambda x: x != nullrev,
1115 return filter(lambda x: x != nullrev,
1116 repo.changelog.parentrevs(rev))
1116 repo.changelog.parentrevs(rev))
1117
1117
1118 if self.startrev == nullrev:
1118 if self.startrev == nullrev:
1119 self.startrev = rev
1119 self.startrev = rev
1120 return True
1120 return True
1121
1121
1122 if rev > self.startrev:
1122 if rev > self.startrev:
1123 # forward: all descendants
1123 # forward: all descendants
1124 if not self.roots:
1124 if not self.roots:
1125 self.roots.append(self.startrev)
1125 self.roots.append(self.startrev)
1126 for parent in realparents(rev):
1126 for parent in realparents(rev):
1127 if parent in self.roots:
1127 if parent in self.roots:
1128 self.roots.append(rev)
1128 self.roots.append(rev)
1129 return True
1129 return True
1130 else:
1130 else:
1131 # backwards: all parents
1131 # backwards: all parents
1132 if not self.roots:
1132 if not self.roots:
1133 self.roots.extend(realparents(self.startrev))
1133 self.roots.extend(realparents(self.startrev))
1134 if rev in self.roots:
1134 if rev in self.roots:
1135 self.roots.remove(rev)
1135 self.roots.remove(rev)
1136 self.roots.extend(realparents(rev))
1136 self.roots.extend(realparents(rev))
1137 return True
1137 return True
1138
1138
1139 return False
1139 return False
1140
1140
1141 # it might be worthwhile to do this in the iterator if the rev range
1141 # it might be worthwhile to do this in the iterator if the rev range
1142 # is descending and the prune args are all within that range
1142 # is descending and the prune args are all within that range
1143 for rev in opts.get('prune', ()):
1143 for rev in opts.get('prune', ()):
1144 rev = repo.changelog.rev(repo.lookup(rev))
1144 rev = repo.changelog.rev(repo.lookup(rev))
1145 ff = followfilter()
1145 ff = followfilter()
1146 stop = min(revs[0], revs[-1])
1146 stop = min(revs[0], revs[-1])
1147 for x in xrange(rev, stop-1, -1):
1147 for x in xrange(rev, stop-1, -1):
1148 if ff.match(x):
1148 if ff.match(x):
1149 wanted.discard(x)
1149 wanted.discard(x)
1150
1150
1151 def iterate():
1151 def iterate():
1152 if follow and not m.files():
1152 if follow and not m.files():
1153 ff = followfilter(onlyfirst=opts.get('follow_first'))
1153 ff = followfilter(onlyfirst=opts.get('follow_first'))
1154 def want(rev):
1154 def want(rev):
1155 return ff.match(rev) and rev in wanted
1155 return ff.match(rev) and rev in wanted
1156 else:
1156 else:
1157 def want(rev):
1157 def want(rev):
1158 return rev in wanted
1158 return rev in wanted
1159
1159
1160 for i, window in increasing_windows(0, len(revs)):
1160 for i, window in increasing_windows(0, len(revs)):
1161 yield 'window', revs[0] < revs[-1], revs[-1]
1161 yield 'window', revs[0] < revs[-1], revs[-1]
1162 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1162 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1163 for rev in sorted(nrevs):
1163 for rev in sorted(nrevs):
1164 fns = fncache.get(rev)
1164 fns = fncache.get(rev)
1165 if not fns:
1165 if not fns:
1166 def fns_generator():
1166 def fns_generator():
1167 for f in change(rev)[3]:
1167 for f in change(rev)[3]:
1168 if m(f):
1168 if m(f):
1169 yield f
1169 yield f
1170 fns = fns_generator()
1170 fns = fns_generator()
1171 yield 'add', rev, fns
1171 yield 'add', rev, fns
1172 for rev in nrevs:
1172 for rev in nrevs:
1173 yield 'iter', rev, None
1173 yield 'iter', rev, None
1174 return iterate(), m
1174 return iterate(), m
1175
1175
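As a rough, self-contained sketch of the 'window'/'add'/'iter' event protocol that iterate() above produces, the snippet below uses a plain list of integers in place of repository revisions and a fixed window size instead of increasing_windows(); names such as demo_iterate and wanted are illustrative only, not Mercurial APIs.

def demo_iterate(revs, wanted, windowsize=4):
    for start in range(0, len(revs), windowsize):
        chunk = revs[start:start + windowsize]
        # announce the window first, then the revs to prepare ('add'),
        # then the revs to actually display ('iter'), mirroring the
        # three event kinds yielded above
        yield 'window', revs[0] < revs[-1], chunk[-1]
        nrevs = [r for r in chunk if r in wanted]
        for r in sorted(nrevs):
            yield 'add', r, None
        for r in nrevs:
            yield 'iter', r, None

for kind, value, _fns in demo_iterate(list(range(10, 0, -1)), {9, 5, 2}):
    print('%s %s' % (kind, value))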
1176 def commit(ui, repo, commitfunc, pats, opts):
1176 def commit(ui, repo, commitfunc, pats, opts):
1177 '''commit the specified files or all outstanding changes'''
1177 '''commit the specified files or all outstanding changes'''
1178 date = opts.get('date')
1178 date = opts.get('date')
1179 if date:
1179 if date:
1180 opts['date'] = util.parsedate(date)
1180 opts['date'] = util.parsedate(date)
1181 message = logmessage(opts)
1181 message = logmessage(opts)
1182
1182
1183 # extract addremove carefully -- this function can be called from a command
1183 # extract addremove carefully -- this function can be called from a command
1184 # that doesn't support addremove
1184 # that doesn't support addremove
1185 if opts.get('addremove'):
1185 if opts.get('addremove'):
1186 addremove(repo, pats, opts)
1186 addremove(repo, pats, opts)
1187
1187
1188 m = match(repo, pats, opts)
1188 m = match(repo, pats, opts)
1189 if pats:
1189 if pats:
1190 modified, added, removed = repo.status(match=m)[:3]
1190 modified, added, removed = repo.status(match=m)[:3]
1191 files = sorted(modified + added + removed)
1191 files = sorted(modified + added + removed)
1192
1192
1193 def is_dir(f):
1193 def is_dir(f):
1194 name = f + '/'
1194 name = f + '/'
1195 i = bisect.bisect(files, name)
1195 i = bisect.bisect(files, name)
1196 return i < len(files) and files[i].startswith(name)
1196 return i < len(files) and files[i].startswith(name)
1197
1197
1198 for f in m.files():
1198 for f in m.files():
1199 if f == '.':
1199 if f == '.':
1200 continue
1200 continue
1201 if f not in files:
1201 if f not in files:
1202 rf = repo.wjoin(f)
1202 rf = repo.wjoin(f)
1203 rel = repo.pathto(f)
1203 rel = repo.pathto(f)
1204 try:
1204 try:
1205 mode = os.lstat(rf)[stat.ST_MODE]
1205 mode = os.lstat(rf)[stat.ST_MODE]
1206 except OSError:
1206 except OSError:
1207 if is_dir(f): # deleted directory ?
1207 if is_dir(f): # deleted directory ?
1208 continue
1208 continue
1209 raise util.Abort(_("file %s not found!") % rel)
1209 raise util.Abort(_("file %s not found!") % rel)
1210 if stat.S_ISDIR(mode):
1210 if stat.S_ISDIR(mode):
1211 if not is_dir(f):
1211 if not is_dir(f):
1212 raise util.Abort(_("no match under directory %s!")
1212 raise util.Abort(_("no match under directory %s!")
1213 % rel)
1213 % rel)
1214 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1214 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1215 raise util.Abort(_("can't commit %s: "
1215 raise util.Abort(_("can't commit %s: "
1216 "unsupported file type!") % rel)
1216 "unsupported file type!") % rel)
1217 elif f not in repo.dirstate:
1217 elif f not in repo.dirstate:
1218 raise util.Abort(_("file %s not tracked!") % rel)
1218 raise util.Abort(_("file %s not tracked!") % rel)
1219 m = matchfiles(repo, files)
1219 m = matchfiles(repo, files)
1220 try:
1220 try:
1221 return commitfunc(ui, repo, message, m, opts)
1221 return commitfunc(ui, repo, message, m, opts)
1222 except ValueError, inst:
1222 except ValueError, inst:
1223 raise util.Abort(str(inst))
1223 raise util.Abort(str(inst))
1224
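A minimal sketch of the callback contract cmdutil.commit relies on: the caller supplies a commitfunc that receives the final message and matcher and performs the actual commit. The FakeMatch/FakeRepo stand-ins and democommit below are assumptions for illustration, not Mercurial classes.

class FakeMatch(object):
    def __init__(self, files):
        self._files = files
    def files(self):
        return self._files

class FakeRepo(object):
    def commit(self, files, message):
        print('committing %r: %s' % (files, message))
        return 'dummy-node'

def democommit(repo, commitfunc, files, message):
    # mirror the shape of cmdutil.commit: build the matcher, then
    # delegate the real work to the supplied callback
    m = FakeMatch(files)
    return commitfunc(None, repo, message, m, {})

def mycommitfunc(ui, repo, message, match, opts):
    return repo.commit(match.files(), message)

democommit(FakeRepo(), mycommitfunc, ['a.txt'], 'example message')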
1225 def commiteditor(repo, ctx, added, updated, removed):
1226 if ctx.description():
1227 return ctx.description()
1228 return commitforceeditor(repo, ctx, added, updated, removed)
1229
1230 def commitforceeditor(repo, ctx, added, updated, removed):
1231 edittext = []
1232 if ctx.description():
1233 edittext.append(ctx.description())
1234 edittext.append("")
1235 edittext.append("") # Empty line between message and comments.
1236 edittext.append(_("HG: Enter commit message."
1237 " Lines beginning with 'HG:' are removed."))
1238 edittext.append("HG: --")
1239 edittext.append(_("HG: user: %s") % ctx.user())
1240 if ctx.p2():
1241 edittext.append(_("HG: branch merge"))
1242 if ctx.branch():
1243 edittext.append(_("HG: branch '%s'")
1244 % encoding.tolocal(ctx.branch()))
1245 edittext.extend([_("HG: added %s") % f for f in added])
1246 edittext.extend([_("HG: changed %s") % f for f in updated])
1247 edittext.extend([_("HG: removed %s") % f for f in removed])
1248 if not added and not updated and not removed:
1249 edittext.append(_("HG: no files changed"))
1250 edittext.append("")
1251 # run editor in the repository root
1252 olddir = os.getcwd()
1253 os.chdir(repo.root)
1254 text = repo.ui.edit("\n".join(edittext), ctx.user())
1255 os.chdir(olddir)
1256
1257 if not text.strip():
1258 raise util.Abort(_("empty commit message"))
1259
1260 return text
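The editor text built above embeds "HG:" helper lines that the prompt says are removed from the final message; a small standalone sketch of that round trip, with the ui.edit call simulated by plain functions (build_edittext and strip_hg_lines are illustrative names only):

def build_edittext(description, added, updated, removed):
    lines = [description, '', '',
             "HG: Enter commit message. Lines beginning with 'HG:' are removed.",
             'HG: --']
    lines.extend('HG: added %s' % f for f in added)
    lines.extend('HG: changed %s' % f for f in updated)
    lines.extend('HG: removed %s' % f for f in removed)
    return '\n'.join(lines) + '\n'

def strip_hg_lines(text):
    # drop the helper lines, keeping only what the user actually wrote
    kept = [l for l in text.splitlines() if not l.startswith('HG:')]
    return '\n'.join(kept).strip()

edited = build_edittext('fix a bug', ['new.py'], ['old.py'], [])
print(strip_hg_lines(edited))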
@@ -1,3447 +1,3451 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, textwrap, subprocess, difflib, time
11 import os, re, sys, textwrap, subprocess, difflib, time
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 import patch, help, mdiff, tempfile, url, encoding
13 import patch, help, mdiff, tempfile, url, encoding
14 import archival, changegroup, cmdutil, sshserver, hbisect
14 import archival, changegroup, cmdutil, sshserver, hbisect
15 from hgweb import server
15 from hgweb import server
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the
23 Schedule files to be version controlled and added to the
24 repository.
24 repository.
25
25
26 The files will be added to the repository at the next commit. To
26 The files will be added to the repository at the next commit. To
27 undo an add before that, see hg revert.
27 undo an add before that, see hg revert.
28
28
29 If no names are given, add all files to the repository.
29 If no names are given, add all files to the repository.
30 """
30 """
31
31
32 rejected = None
32 rejected = None
33 exacts = {}
33 exacts = {}
34 names = []
34 names = []
35 m = cmdutil.match(repo, pats, opts)
35 m = cmdutil.match(repo, pats, opts)
36 m.bad = lambda x,y: True
36 m.bad = lambda x,y: True
37 for abs in repo.walk(m):
37 for abs in repo.walk(m):
38 if m.exact(abs):
38 if m.exact(abs):
39 if ui.verbose:
39 if ui.verbose:
40 ui.status(_('adding %s\n') % m.rel(abs))
40 ui.status(_('adding %s\n') % m.rel(abs))
41 names.append(abs)
41 names.append(abs)
42 exacts[abs] = 1
42 exacts[abs] = 1
43 elif abs not in repo.dirstate:
43 elif abs not in repo.dirstate:
44 ui.status(_('adding %s\n') % m.rel(abs))
44 ui.status(_('adding %s\n') % m.rel(abs))
45 names.append(abs)
45 names.append(abs)
46 if not opts.get('dry_run'):
46 if not opts.get('dry_run'):
47 rejected = repo.add(names)
47 rejected = repo.add(names)
48 rejected = [p for p in rejected if p in exacts]
48 rejected = [p for p in rejected if p in exacts]
49 return rejected and 1 or 0
49 return rejected and 1 or 0
50
50
51 def addremove(ui, repo, *pats, **opts):
51 def addremove(ui, repo, *pats, **opts):
52 """add all new files, delete all missing files
52 """add all new files, delete all missing files
53
53
54 Add all new files and remove all missing files from the
54 Add all new files and remove all missing files from the
55 repository.
55 repository.
56
56
57 New files are ignored if they match any of the patterns in
57 New files are ignored if they match any of the patterns in
58 .hgignore. As with add, these changes take effect at the next
58 .hgignore. As with add, these changes take effect at the next
59 commit.
59 commit.
60
60
61 Use the -s/--similarity option to detect renamed files. With a
61 Use the -s/--similarity option to detect renamed files. With a
62 parameter > 0, this compares every removed file with every added
62 parameter > 0, this compares every removed file with every added
63 file and records those similar enough as renames. This option
63 file and records those similar enough as renames. This option
64 takes a percentage between 0 (disabled) and 100 (files must be
64 takes a percentage between 0 (disabled) and 100 (files must be
65 identical) as its parameter. Detecting renamed files this way can
65 identical) as its parameter. Detecting renamed files this way can
66 be expensive.
66 be expensive.
67 """
67 """
68 try:
68 try:
69 sim = float(opts.get('similarity') or 0)
69 sim = float(opts.get('similarity') or 0)
70 except ValueError:
70 except ValueError:
71 raise util.Abort(_('similarity must be a number'))
71 raise util.Abort(_('similarity must be a number'))
72 if sim < 0 or sim > 100:
72 if sim < 0 or sim > 100:
73 raise util.Abort(_('similarity must be between 0 and 100'))
73 raise util.Abort(_('similarity must be between 0 and 100'))
74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
75
75
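The --similarity handling above accepts a percentage and hands a 0.0-1.0 fraction to cmdutil.addremove; a tiny standalone sketch of that validation and conversion (parse_similarity is an illustrative helper and raises ValueError instead of util.Abort):

def parse_similarity(value):
    try:
        sim = float(value or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

print(parse_similarity('75'))   # 0.75
print(parse_similarity(None))   # 0.0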
76 def annotate(ui, repo, *pats, **opts):
76 def annotate(ui, repo, *pats, **opts):
77 """show changeset information per file line
77 """show changeset information per file line
78
78
79 List changes in files, showing the revision id responsible for
79 List changes in files, showing the revision id responsible for
80 each line
80 each line
81
81
82 This command is useful to discover who did a change or when a
82 This command is useful to discover who did a change or when a
83 change took place.
83 change took place.
84
84
85 Without the -a/--text option, annotate will avoid processing files
85 Without the -a/--text option, annotate will avoid processing files
86 it detects as binary. With -a, annotate will generate an
86 it detects as binary. With -a, annotate will generate an
87 annotation anyway, probably with undesirable results.
87 annotation anyway, probably with undesirable results.
88 """
88 """
89 datefunc = ui.quiet and util.shortdate or util.datestr
89 datefunc = ui.quiet and util.shortdate or util.datestr
90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
91
91
92 if not pats:
92 if not pats:
93 raise util.Abort(_('at least one file name or pattern required'))
93 raise util.Abort(_('at least one file name or pattern required'))
94
94
95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
96 ('number', lambda x: str(x[0].rev())),
96 ('number', lambda x: str(x[0].rev())),
97 ('changeset', lambda x: short(x[0].node())),
97 ('changeset', lambda x: short(x[0].node())),
98 ('date', getdate),
98 ('date', getdate),
99 ('follow', lambda x: x[0].path()),
99 ('follow', lambda x: x[0].path()),
100 ]
100 ]
101
101
102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
103 and not opts.get('follow')):
103 and not opts.get('follow')):
104 opts['number'] = 1
104 opts['number'] = 1
105
105
106 linenumber = opts.get('line_number') is not None
106 linenumber = opts.get('line_number') is not None
107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
108 raise util.Abort(_('at least one of -n/-c is required for -l'))
108 raise util.Abort(_('at least one of -n/-c is required for -l'))
109
109
110 funcmap = [func for op, func in opmap if opts.get(op)]
110 funcmap = [func for op, func in opmap if opts.get(op)]
111 if linenumber:
111 if linenumber:
112 lastfunc = funcmap[-1]
112 lastfunc = funcmap[-1]
113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
114
114
115 ctx = repo[opts.get('rev')]
115 ctx = repo[opts.get('rev')]
116
116
117 m = cmdutil.match(repo, pats, opts)
117 m = cmdutil.match(repo, pats, opts)
118 for abs in ctx.walk(m):
118 for abs in ctx.walk(m):
119 fctx = ctx[abs]
119 fctx = ctx[abs]
120 if not opts.get('text') and util.binary(fctx.data()):
120 if not opts.get('text') and util.binary(fctx.data()):
121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
122 continue
122 continue
123
123
124 lines = fctx.annotate(follow=opts.get('follow'),
124 lines = fctx.annotate(follow=opts.get('follow'),
125 linenumber=linenumber)
125 linenumber=linenumber)
126 pieces = []
126 pieces = []
127
127
128 for f in funcmap:
128 for f in funcmap:
129 l = [f(n) for n, dummy in lines]
129 l = [f(n) for n, dummy in lines]
130 if l:
130 if l:
131 ml = max(map(len, l))
131 ml = max(map(len, l))
132 pieces.append(["%*s" % (ml, x) for x in l])
132 pieces.append(["%*s" % (ml, x) for x in l])
133
133
134 if pieces:
134 if pieces:
135 for p, l in zip(zip(*pieces), lines):
135 for p, l in zip(zip(*pieces), lines):
136 ui.write("%s: %s" % (" ".join(p), l[1]))
136 ui.write("%s: %s" % (" ".join(p), l[1]))
137
137
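The output loop above right-aligns each annotation column to its widest entry before joining the columns per line; a compact standalone sketch of that alignment idiom with made-up data (the variable names below are not Mercurial's):

lines = ['first line\n', 'second line\n', 'third\n']
users = ['alice', 'bob', 'carol']
revs = ['3', '12', '7']

pieces = []
for column in (users, revs):
    width = max(len(x) for x in column)            # widest cell decides the width
    pieces.append(['%*s' % (width, x) for x in column])

for p, l in zip(zip(*pieces), lines):
    print('%s: %s' % (' '.join(p), l.rstrip('\n')))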
138 def archive(ui, repo, dest, **opts):
138 def archive(ui, repo, dest, **opts):
139 '''create unversioned archive of a repository revision
139 '''create unversioned archive of a repository revision
140
140
141 By default, the revision used is the parent of the working
141 By default, the revision used is the parent of the working
142 directory; use -r/--rev to specify a different revision.
142 directory; use -r/--rev to specify a different revision.
143
143
144 To specify the type of archive to create, use -t/--type. Valid
144 To specify the type of archive to create, use -t/--type. Valid
145 types are:
145 types are:
146
146
147 "files" (default): a directory full of files
147 "files" (default): a directory full of files
148 "tar": tar archive, uncompressed
148 "tar": tar archive, uncompressed
149 "tbz2": tar archive, compressed using bzip2
149 "tbz2": tar archive, compressed using bzip2
150 "tgz": tar archive, compressed using gzip
150 "tgz": tar archive, compressed using gzip
151 "uzip": zip archive, uncompressed
151 "uzip": zip archive, uncompressed
152 "zip": zip archive, compressed using deflate
152 "zip": zip archive, compressed using deflate
153
153
154 The exact name of the destination archive or directory is given
154 The exact name of the destination archive or directory is given
155 using a format string; see 'hg help export' for details.
155 using a format string; see 'hg help export' for details.
156
156
157 Each member added to an archive file has a directory prefix
157 Each member added to an archive file has a directory prefix
158 prepended. Use -p/--prefix to specify a format string for the
158 prepended. Use -p/--prefix to specify a format string for the
159 prefix. The default is the basename of the archive, with suffixes
159 prefix. The default is the basename of the archive, with suffixes
160 removed.
160 removed.
161 '''
161 '''
162
162
163 ctx = repo[opts.get('rev')]
163 ctx = repo[opts.get('rev')]
164 if not ctx:
164 if not ctx:
165 raise util.Abort(_('no working directory: please specify a revision'))
165 raise util.Abort(_('no working directory: please specify a revision'))
166 node = ctx.node()
166 node = ctx.node()
167 dest = cmdutil.make_filename(repo, dest, node)
167 dest = cmdutil.make_filename(repo, dest, node)
168 if os.path.realpath(dest) == repo.root:
168 if os.path.realpath(dest) == repo.root:
169 raise util.Abort(_('repository root cannot be destination'))
169 raise util.Abort(_('repository root cannot be destination'))
170 matchfn = cmdutil.match(repo, [], opts)
170 matchfn = cmdutil.match(repo, [], opts)
171 kind = opts.get('type') or 'files'
171 kind = opts.get('type') or 'files'
172 prefix = opts.get('prefix')
172 prefix = opts.get('prefix')
173 if dest == '-':
173 if dest == '-':
174 if kind == 'files':
174 if kind == 'files':
175 raise util.Abort(_('cannot archive plain files to stdout'))
175 raise util.Abort(_('cannot archive plain files to stdout'))
176 dest = sys.stdout
176 dest = sys.stdout
177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
178 prefix = cmdutil.make_filename(repo, prefix, node)
178 prefix = cmdutil.make_filename(repo, prefix, node)
179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
180 matchfn, prefix)
180 matchfn, prefix)
181
181
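A small sketch of the destination and prefix defaulting the archive command performs, assuming a plain string substitution stands in for cmdutil.make_filename and that only the %h placeholder is expanded (both assumptions for illustration):

import os

def archive_names(root, dest, prefix, shorthash):
    if not prefix:
        # default prefix: basename of the repository plus the short hash
        prefix = os.path.basename(root) + '-%h'
    expand = lambda s: s.replace('%h', shorthash)
    return expand(dest), expand(prefix)

print(archive_names('/work/repo', 'repo-%h.tar.gz', None, 'a1b2c3d4e5f6'))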
182 def backout(ui, repo, node=None, rev=None, **opts):
182 def backout(ui, repo, node=None, rev=None, **opts):
183 '''reverse effect of earlier changeset
183 '''reverse effect of earlier changeset
184
184
185 Commit the backed out changes as a new changeset. The new
185 Commit the backed out changes as a new changeset. The new
186 changeset is a child of the backed out changeset.
186 changeset is a child of the backed out changeset.
187
187
188 If you back out a changeset other than the tip, a new head is
188 If you back out a changeset other than the tip, a new head is
189 created. This head will be the new tip and you should merge this
189 created. This head will be the new tip and you should merge this
190 backout changeset with another head (current one by default).
190 backout changeset with another head (current one by default).
191
191
192 The --merge option remembers the parent of the working directory
192 The --merge option remembers the parent of the working directory
193 before starting the backout, then merges the new head with that
193 before starting the backout, then merges the new head with that
194 changeset afterwards. This saves you from doing the merge by hand.
194 changeset afterwards. This saves you from doing the merge by hand.
195 The result of this merge is not committed, as with a normal merge.
195 The result of this merge is not committed, as with a normal merge.
196
196
197 See \'hg help dates\' for a list of formats valid for -d/--date.
197 See \'hg help dates\' for a list of formats valid for -d/--date.
198 '''
198 '''
199 if rev and node:
199 if rev and node:
200 raise util.Abort(_("please specify just one revision"))
200 raise util.Abort(_("please specify just one revision"))
201
201
202 if not rev:
202 if not rev:
203 rev = node
203 rev = node
204
204
205 if not rev:
205 if not rev:
206 raise util.Abort(_("please specify a revision to backout"))
206 raise util.Abort(_("please specify a revision to backout"))
207
207
208 date = opts.get('date')
208 date = opts.get('date')
209 if date:
209 if date:
210 opts['date'] = util.parsedate(date)
210 opts['date'] = util.parsedate(date)
211
211
212 cmdutil.bail_if_changed(repo)
212 cmdutil.bail_if_changed(repo)
213 node = repo.lookup(rev)
213 node = repo.lookup(rev)
214
214
215 op1, op2 = repo.dirstate.parents()
215 op1, op2 = repo.dirstate.parents()
216 a = repo.changelog.ancestor(op1, node)
216 a = repo.changelog.ancestor(op1, node)
217 if a != node:
217 if a != node:
218 raise util.Abort(_('cannot back out change on a different branch'))
218 raise util.Abort(_('cannot back out change on a different branch'))
219
219
220 p1, p2 = repo.changelog.parents(node)
220 p1, p2 = repo.changelog.parents(node)
221 if p1 == nullid:
221 if p1 == nullid:
222 raise util.Abort(_('cannot back out a change with no parents'))
222 raise util.Abort(_('cannot back out a change with no parents'))
223 if p2 != nullid:
223 if p2 != nullid:
224 if not opts.get('parent'):
224 if not opts.get('parent'):
225 raise util.Abort(_('cannot back out a merge changeset without '
225 raise util.Abort(_('cannot back out a merge changeset without '
226 '--parent'))
226 '--parent'))
227 p = repo.lookup(opts['parent'])
227 p = repo.lookup(opts['parent'])
228 if p not in (p1, p2):
228 if p not in (p1, p2):
229 raise util.Abort(_('%s is not a parent of %s') %
229 raise util.Abort(_('%s is not a parent of %s') %
230 (short(p), short(node)))
230 (short(p), short(node)))
231 parent = p
231 parent = p
232 else:
232 else:
233 if opts.get('parent'):
233 if opts.get('parent'):
234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
235 parent = p1
235 parent = p1
236
236
237 # the backout should appear on the same branch
237 # the backout should appear on the same branch
238 branch = repo.dirstate.branch()
238 branch = repo.dirstate.branch()
239 hg.clean(repo, node, show_stats=False)
239 hg.clean(repo, node, show_stats=False)
240 repo.dirstate.setbranch(branch)
240 repo.dirstate.setbranch(branch)
241 revert_opts = opts.copy()
241 revert_opts = opts.copy()
242 revert_opts['date'] = None
242 revert_opts['date'] = None
243 revert_opts['all'] = True
243 revert_opts['all'] = True
244 revert_opts['rev'] = hex(parent)
244 revert_opts['rev'] = hex(parent)
245 revert_opts['no_backup'] = None
245 revert_opts['no_backup'] = None
246 revert(ui, repo, **revert_opts)
246 revert(ui, repo, **revert_opts)
247 commit_opts = opts.copy()
247 commit_opts = opts.copy()
248 commit_opts['addremove'] = False
248 commit_opts['addremove'] = False
249 if not commit_opts['message'] and not commit_opts['logfile']:
249 if not commit_opts['message'] and not commit_opts['logfile']:
250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
251 commit_opts['force_editor'] = True
251 commit_opts['force_editor'] = True
252 commit(ui, repo, **commit_opts)
252 commit(ui, repo, **commit_opts)
253 def nice(node):
253 def nice(node):
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
255 ui.status(_('changeset %s backs out changeset %s\n') %
255 ui.status(_('changeset %s backs out changeset %s\n') %
256 (nice(repo.changelog.tip()), nice(node)))
256 (nice(repo.changelog.tip()), nice(node)))
257 if op1 != node:
257 if op1 != node:
258 hg.clean(repo, op1, show_stats=False)
258 hg.clean(repo, op1, show_stats=False)
259 if opts.get('merge'):
259 if opts.get('merge'):
260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
261 hg.merge(repo, hex(repo.changelog.tip()))
261 hg.merge(repo, hex(repo.changelog.tip()))
262 else:
262 else:
263 ui.status(_('the backout changeset is a new head - '
263 ui.status(_('the backout changeset is a new head - '
264 'do not forget to merge\n'))
264 'do not forget to merge\n'))
265 ui.status(_('(use "backout --merge" '
265 ui.status(_('(use "backout --merge" '
266 'if you want to auto-merge)\n'))
266 'if you want to auto-merge)\n'))
267
267
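The --parent handling above decides which parent the backed-out changeset is reverted towards; a minimal sketch of just that decision, using plain strings for nodes and ValueError instead of util.Abort (pick_backout_parent is an illustrative name):

NULLID = None

def pick_backout_parent(p1, p2, requested=None):
    if p1 is NULLID:
        raise ValueError('cannot back out a change with no parents')
    if p2 is not NULLID:
        # merge changeset: the caller must name one of the two parents
        if requested is None:
            raise ValueError('cannot back out a merge changeset without --parent')
        if requested not in (p1, p2):
            raise ValueError('%s is not a parent' % requested)
        return requested
    if requested is not None:
        raise ValueError('cannot use --parent on non-merge changeset')
    return p1

print(pick_backout_parent('abc123', None))               # abc123
print(pick_backout_parent('abc123', 'def456', 'def456')) # def456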
268 def bisect(ui, repo, rev=None, extra=None, command=None,
268 def bisect(ui, repo, rev=None, extra=None, command=None,
269 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 reset=None, good=None, bad=None, skip=None, noupdate=None):
270 """subdivision search of changesets
270 """subdivision search of changesets
271
271
272 This command helps to find changesets which introduce problems. To
272 This command helps to find changesets which introduce problems. To
273 use, mark the earliest changeset you know exhibits the problem as
273 use, mark the earliest changeset you know exhibits the problem as
274 bad, then mark the latest changeset which is free from the problem
274 bad, then mark the latest changeset which is free from the problem
275 as good. Bisect will update your working directory to a revision
275 as good. Bisect will update your working directory to a revision
276 for testing (unless the -U/--noupdate option is specified). Once
276 for testing (unless the -U/--noupdate option is specified). Once
277 you have performed tests, mark the working directory as bad or
277 you have performed tests, mark the working directory as bad or
278 good and bisect will either update to another candidate changeset
278 good and bisect will either update to another candidate changeset
279 or announce that it has found the bad revision.
279 or announce that it has found the bad revision.
280
280
281 As a shortcut, you can also use the revision argument to mark a
281 As a shortcut, you can also use the revision argument to mark a
282 revision as good or bad without checking it out first.
282 revision as good or bad without checking it out first.
283
283
284 If you supply a command, it will be used for automatic bisection.
284 If you supply a command, it will be used for automatic bisection.
285 The command's exit status is used to mark revisions: 0 marks the
285 The command's exit status is used to mark revisions: 0 marks the
286 revision as good, 125 skips it, 127 (command not found) aborts the
286 revision as good, 125 skips it, 127 (command not found) aborts the
287 bisection, and any other non-zero exit status marks the revision
287 bisection, and any other non-zero exit status marks the revision
288 as bad.
288 as bad.
289 """
289 """
290 def print_result(nodes, good):
290 def print_result(nodes, good):
291 displayer = cmdutil.show_changeset(ui, repo, {})
291 displayer = cmdutil.show_changeset(ui, repo, {})
292 if len(nodes) == 1:
292 if len(nodes) == 1:
293 # narrowed it down to a single revision
293 # narrowed it down to a single revision
294 if good:
294 if good:
295 ui.write(_("The first good revision is:\n"))
295 ui.write(_("The first good revision is:\n"))
296 else:
296 else:
297 ui.write(_("The first bad revision is:\n"))
297 ui.write(_("The first bad revision is:\n"))
298 displayer.show(repo[nodes[0]])
298 displayer.show(repo[nodes[0]])
299 else:
299 else:
300 # multiple possible revisions
300 # multiple possible revisions
301 if good:
301 if good:
302 ui.write(_("Due to skipped revisions, the first "
302 ui.write(_("Due to skipped revisions, the first "
303 "good revision could be any of:\n"))
303 "good revision could be any of:\n"))
304 else:
304 else:
305 ui.write(_("Due to skipped revisions, the first "
305 ui.write(_("Due to skipped revisions, the first "
306 "bad revision could be any of:\n"))
306 "bad revision could be any of:\n"))
307 for n in nodes:
307 for n in nodes:
308 displayer.show(repo[n])
308 displayer.show(repo[n])
309
309
310 def check_state(state, interactive=True):
310 def check_state(state, interactive=True):
311 if not state['good'] or not state['bad']:
311 if not state['good'] or not state['bad']:
312 if (good or bad or skip or reset) and interactive:
312 if (good or bad or skip or reset) and interactive:
313 return
313 return
314 if not state['good']:
314 if not state['good']:
315 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 raise util.Abort(_('cannot bisect (no known good revisions)'))
316 else:
316 else:
317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
318 return True
318 return True
319
319
320 # backward compatibility
320 # backward compatibility
321 if rev in "good bad reset init".split():
321 if rev in "good bad reset init".split():
322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
323 cmd, rev, extra = rev, extra, None
323 cmd, rev, extra = rev, extra, None
324 if cmd == "good":
324 if cmd == "good":
325 good = True
325 good = True
326 elif cmd == "bad":
326 elif cmd == "bad":
327 bad = True
327 bad = True
328 else:
328 else:
329 reset = True
329 reset = True
330 elif extra or good + bad + skip + reset + bool(command) > 1:
330 elif extra or good + bad + skip + reset + bool(command) > 1:
331 raise util.Abort(_('incompatible arguments'))
331 raise util.Abort(_('incompatible arguments'))
332
332
333 if reset:
333 if reset:
334 p = repo.join("bisect.state")
334 p = repo.join("bisect.state")
335 if os.path.exists(p):
335 if os.path.exists(p):
336 os.unlink(p)
336 os.unlink(p)
337 return
337 return
338
338
339 state = hbisect.load_state(repo)
339 state = hbisect.load_state(repo)
340
340
341 if command:
341 if command:
342 commandpath = util.find_exe(command)
342 commandpath = util.find_exe(command)
343 changesets = 1
343 changesets = 1
344 try:
344 try:
345 while changesets:
345 while changesets:
346 # update state
346 # update state
347 status = subprocess.call([commandpath])
347 status = subprocess.call([commandpath])
348 if status == 125:
348 if status == 125:
349 transition = "skip"
349 transition = "skip"
350 elif status == 0:
350 elif status == 0:
351 transition = "good"
351 transition = "good"
352 # status < 0 means process was killed
352 # status < 0 means process was killed
353 elif status == 127:
353 elif status == 127:
354 raise util.Abort(_("failed to execute %s") % command)
354 raise util.Abort(_("failed to execute %s") % command)
355 elif status < 0:
355 elif status < 0:
356 raise util.Abort(_("%s killed") % command)
356 raise util.Abort(_("%s killed") % command)
357 else:
357 else:
358 transition = "bad"
358 transition = "bad"
359 node = repo.lookup(rev or '.')
359 node = repo.lookup(rev or '.')
360 state[transition].append(node)
360 state[transition].append(node)
361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
362 check_state(state, interactive=False)
362 check_state(state, interactive=False)
363 # bisect
363 # bisect
364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 # update to next check
365 # update to next check
366 cmdutil.bail_if_changed(repo)
366 cmdutil.bail_if_changed(repo)
367 hg.clean(repo, nodes[0], show_stats=False)
367 hg.clean(repo, nodes[0], show_stats=False)
368 finally:
368 finally:
369 hbisect.save_state(repo, state)
369 hbisect.save_state(repo, state)
370 return print_result(nodes, not status)
370 return print_result(nodes, not status)
371
371
372 # update state
372 # update state
373 node = repo.lookup(rev or '.')
373 node = repo.lookup(rev or '.')
374 if good:
374 if good:
375 state['good'].append(node)
375 state['good'].append(node)
376 elif bad:
376 elif bad:
377 state['bad'].append(node)
377 state['bad'].append(node)
378 elif skip:
378 elif skip:
379 state['skip'].append(node)
379 state['skip'].append(node)
380
380
381 hbisect.save_state(repo, state)
381 hbisect.save_state(repo, state)
382
382
383 if not check_state(state):
383 if not check_state(state):
384 return
384 return
385
385
386 # actually bisect
386 # actually bisect
387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 if changesets == 0:
388 if changesets == 0:
389 print_result(nodes, good)
389 print_result(nodes, good)
390 else:
390 else:
391 assert len(nodes) == 1 # only a single node can be tested next
391 assert len(nodes) == 1 # only a single node can be tested next
392 node = nodes[0]
392 node = nodes[0]
393 # compute the approximate number of remaining tests
393 # compute the approximate number of remaining tests
394 tests, size = 0, 2
394 tests, size = 0, 2
395 while size <= changesets:
395 while size <= changesets:
396 tests, size = tests + 1, size * 2
396 tests, size = tests + 1, size * 2
397 rev = repo.changelog.rev(node)
397 rev = repo.changelog.rev(node)
398 ui.write(_("Testing changeset %s:%s "
398 ui.write(_("Testing changeset %s:%s "
399 "(%s changesets remaining, ~%s tests)\n")
399 "(%s changesets remaining, ~%s tests)\n")
400 % (rev, short(node), changesets, tests))
400 % (rev, short(node), changesets, tests))
401 if not noupdate:
401 if not noupdate:
402 cmdutil.bail_if_changed(repo)
402 cmdutil.bail_if_changed(repo)
403 return hg.clean(repo, node)
403 return hg.clean(repo, node)
404
404
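Two small pieces of the bisect logic above, restated as a standalone sketch: mapping a test command's exit status to a state transition, and estimating how many tests remain for a given number of candidate changesets (the doubling loop is an approximate ceil(log2)). The function names are illustrative only.

def status_to_transition(status):
    if status == 125:
        return 'skip'
    if status == 0:
        return 'good'
    if status == 127 or status < 0:
        raise RuntimeError('command not found or killed')
    return 'bad'

def remaining_tests(changesets):
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

print('%s %s %s' % (status_to_transition(0),
                    status_to_transition(1),
                    status_to_transition(125)))
print(remaining_tests(100))   # about 6 tests for 100 candidates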
405 def branch(ui, repo, label=None, **opts):
405 def branch(ui, repo, label=None, **opts):
406 """set or show the current branch name
406 """set or show the current branch name
407
407
408 With no argument, show the current branch name. With one argument,
408 With no argument, show the current branch name. With one argument,
409 set the working directory branch name (the branch does not exist
409 set the working directory branch name (the branch does not exist
410 in the repository until the next commit). It is recommended to use
410 in the repository until the next commit). It is recommended to use
411 the 'default' branch as your primary development branch.
411 the 'default' branch as your primary development branch.
412
412
413 Unless -f/--force is specified, branch will not let you set a
413 Unless -f/--force is specified, branch will not let you set a
414 branch name that shadows an existing branch.
414 branch name that shadows an existing branch.
415
415
416 Use -C/--clean to reset the working directory branch to that of
416 Use -C/--clean to reset the working directory branch to that of
417 the parent of the working directory, negating a previous branch
417 the parent of the working directory, negating a previous branch
418 change.
418 change.
419
419
420 Use the command 'hg update' to switch to an existing branch.
420 Use the command 'hg update' to switch to an existing branch.
421 """
421 """
422
422
423 if opts.get('clean'):
423 if opts.get('clean'):
424 label = repo[None].parents()[0].branch()
424 label = repo[None].parents()[0].branch()
425 repo.dirstate.setbranch(label)
425 repo.dirstate.setbranch(label)
426 ui.status(_('reset working directory to branch %s\n') % label)
426 ui.status(_('reset working directory to branch %s\n') % label)
427 elif label:
427 elif label:
428 if not opts.get('force') and label in repo.branchtags():
428 if not opts.get('force') and label in repo.branchtags():
429 if label not in [p.branch() for p in repo.parents()]:
429 if label not in [p.branch() for p in repo.parents()]:
430 raise util.Abort(_('a branch of the same name already exists'
430 raise util.Abort(_('a branch of the same name already exists'
431 ' (use --force to override)'))
431 ' (use --force to override)'))
432 repo.dirstate.setbranch(encoding.fromlocal(label))
432 repo.dirstate.setbranch(encoding.fromlocal(label))
433 ui.status(_('marked working directory as branch %s\n') % label)
433 ui.status(_('marked working directory as branch %s\n') % label)
434 else:
434 else:
435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
436
436
437 def branches(ui, repo, active=False):
437 def branches(ui, repo, active=False):
438 """list repository named branches
438 """list repository named branches
439
439
440 List the repository's named branches, indicating which ones are
440 List the repository's named branches, indicating which ones are
441 inactive. If active is specified, only show active branches.
441 inactive. If active is specified, only show active branches.
442
442
443 A branch is considered active if it contains repository heads.
443 A branch is considered active if it contains repository heads.
444
444
445 Use the command 'hg update' to switch to an existing branch.
445 Use the command 'hg update' to switch to an existing branch.
446 """
446 """
447 hexfunc = ui.debugflag and hex or short
447 hexfunc = ui.debugflag and hex or short
448 activebranches = [encoding.tolocal(repo[n].branch())
448 activebranches = [encoding.tolocal(repo[n].branch())
449 for n in repo.heads(closed=False)]
449 for n in repo.heads(closed=False)]
450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
451 for tag, node in repo.branchtags().items()],
451 for tag, node in repo.branchtags().items()],
452 reverse=True)
452 reverse=True)
453
453
454 for isactive, node, tag in branches:
454 for isactive, node, tag in branches:
455 if (not active) or isactive:
455 if (not active) or isactive:
456 if ui.quiet:
456 if ui.quiet:
457 ui.write("%s\n" % tag)
457 ui.write("%s\n" % tag)
458 else:
458 else:
459 hn = repo.lookup(node)
459 hn = repo.lookup(node)
460 if isactive:
460 if isactive:
461 notice = ''
461 notice = ''
462 elif hn not in repo.branchheads(tag, closed=False):
462 elif hn not in repo.branchheads(tag, closed=False):
463 notice = ' (closed)'
463 notice = ' (closed)'
464 else:
464 else:
465 notice = ' (inactive)'
465 notice = ' (inactive)'
466 rev = str(node).rjust(31 - encoding.colwidth(tag))
466 rev = str(node).rjust(31 - encoding.colwidth(tag))
467 data = tag, rev, hexfunc(hn), notice
467 data = tag, rev, hexfunc(hn), notice
468 ui.write("%s %s:%s%s\n" % data)
468 ui.write("%s %s:%s%s\n" % data)
469
469
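The listing above sorts branches by (is-active, head revision, name) in reverse so active branches with the newest heads come first; a tiny standalone sketch of that sort key on made-up data:

branch_heads = {'default': 42, 'stable': 40, 'old-feature': 7}
active = {'default', 'stable'}

rows = sorted(((name in active, rev, name)
               for name, rev in branch_heads.items()), reverse=True)
for isactive, rev, name in rows:
    flag = '' if isactive else ' (inactive)'
    print('%-12s %d%s' % (name, rev, flag))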
470 def bundle(ui, repo, fname, dest=None, **opts):
470 def bundle(ui, repo, fname, dest=None, **opts):
471 """create a changegroup file
471 """create a changegroup file
472
472
473 Generate a compressed changegroup file collecting changesets not
473 Generate a compressed changegroup file collecting changesets not
474 known to be in another repository.
474 known to be in another repository.
475
475
476 If no destination repository is specified the destination is
476 If no destination repository is specified the destination is
477 assumed to have all the nodes specified by one or more --base
477 assumed to have all the nodes specified by one or more --base
478 parameters. To create a bundle containing all changesets, use
478 parameters. To create a bundle containing all changesets, use
479 -a/--all (or --base null). To change the compression method
479 -a/--all (or --base null). To change the compression method
480 applied, use the -t/--type option (by default, bundles are
480 applied, use the -t/--type option (by default, bundles are
481 compressed using bz2).
481 compressed using bz2).
482
482
483 The bundle file can then be transferred using conventional means
483 The bundle file can then be transferred using conventional means
484 and applied to another repository with the unbundle or pull
484 and applied to another repository with the unbundle or pull
485 command. This is useful when direct push and pull are not
485 command. This is useful when direct push and pull are not
486 available or when exporting an entire repository is undesirable.
486 available or when exporting an entire repository is undesirable.
487
487
488 Applying bundles preserves all changeset contents including
488 Applying bundles preserves all changeset contents including
489 permissions, copy/rename information, and revision history.
489 permissions, copy/rename information, and revision history.
490 """
490 """
491 revs = opts.get('rev') or None
491 revs = opts.get('rev') or None
492 if revs:
492 if revs:
493 revs = [repo.lookup(rev) for rev in revs]
493 revs = [repo.lookup(rev) for rev in revs]
494 if opts.get('all'):
494 if opts.get('all'):
495 base = ['null']
495 base = ['null']
496 else:
496 else:
497 base = opts.get('base')
497 base = opts.get('base')
498 if base:
498 if base:
499 if dest:
499 if dest:
500 raise util.Abort(_("--base is incompatible with specifying "
500 raise util.Abort(_("--base is incompatible with specifying "
501 "a destination"))
501 "a destination"))
502 base = [repo.lookup(rev) for rev in base]
502 base = [repo.lookup(rev) for rev in base]
503 # create the right base
503 # create the right base
504 # XXX: nodesbetween / changegroup* should be "fixed" instead
504 # XXX: nodesbetween / changegroup* should be "fixed" instead
505 o = []
505 o = []
506 has = {nullid: None}
506 has = {nullid: None}
507 for n in base:
507 for n in base:
508 has.update(repo.changelog.reachable(n))
508 has.update(repo.changelog.reachable(n))
509 if revs:
509 if revs:
510 visit = list(revs)
510 visit = list(revs)
511 else:
511 else:
512 visit = repo.changelog.heads()
512 visit = repo.changelog.heads()
513 seen = {}
513 seen = {}
514 while visit:
514 while visit:
515 n = visit.pop(0)
515 n = visit.pop(0)
516 parents = [p for p in repo.changelog.parents(n) if p not in has]
516 parents = [p for p in repo.changelog.parents(n) if p not in has]
517 if len(parents) == 0:
517 if len(parents) == 0:
518 o.insert(0, n)
518 o.insert(0, n)
519 else:
519 else:
520 for p in parents:
520 for p in parents:
521 if p not in seen:
521 if p not in seen:
522 seen[p] = 1
522 seen[p] = 1
523 visit.append(p)
523 visit.append(p)
524 else:
524 else:
525 dest, revs, checkout = hg.parseurl(
525 dest, revs, checkout = hg.parseurl(
526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
528 o = repo.findoutgoing(other, force=opts.get('force'))
528 o = repo.findoutgoing(other, force=opts.get('force'))
529
529
530 if revs:
530 if revs:
531 cg = repo.changegroupsubset(o, revs, 'bundle')
531 cg = repo.changegroupsubset(o, revs, 'bundle')
532 else:
532 else:
533 cg = repo.changegroup(o, 'bundle')
533 cg = repo.changegroup(o, 'bundle')
534
534
535 bundletype = opts.get('type', 'bzip2').lower()
535 bundletype = opts.get('type', 'bzip2').lower()
536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
537 bundletype = btypes.get(bundletype)
537 bundletype = btypes.get(bundletype)
538 if bundletype not in changegroup.bundletypes:
538 if bundletype not in changegroup.bundletypes:
539 raise util.Abort(_('unknown bundle type specified with --type'))
539 raise util.Abort(_('unknown bundle type specified with --type'))
540
540
541 changegroup.writebundle(cg, fname, bundletype)
541 changegroup.writebundle(cg, fname, bundletype)
542
542
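The --base branch above walks backwards from the requested heads, stopping at anything already reachable from the base nodes, and collects the boundary nodes to bundle. A toy version of that traversal on a dictionary-based DAG (the parent map and node names are made up for illustration):

parents = {            # child -> list of parents
    'a': [], 'b': ['a'], 'c': ['b'], 'd': ['c'], 'e': ['c'],
}

def reachable(node):
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents[n])
    return seen

def outgoing_roots(heads, bases):
    has = set()
    for b in bases:
        has |= reachable(b)
    o, seen, visit = [], set(), list(heads)
    while visit:
        n = visit.pop(0)
        ps = [p for p in parents[n] if p not in has]
        if not ps:
            o.insert(0, n)          # boundary: every parent is already known
        else:
            for p in ps:
                if p not in seen:
                    seen.add(p)
                    visit.append(p)
    return o

print(outgoing_roots(['d', 'e'], ['b']))   # ['c']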
543 def cat(ui, repo, file1, *pats, **opts):
543 def cat(ui, repo, file1, *pats, **opts):
544 """output the current or given revision of files
544 """output the current or given revision of files
545
545
546 Print the specified files as they were at the given revision. If
546 Print the specified files as they were at the given revision. If
547 no revision is given, the parent of the working directory is used,
547 no revision is given, the parent of the working directory is used,
548 or tip if no revision is checked out.
548 or tip if no revision is checked out.
549
549
550 Output may be to a file, in which case the name of the file is
550 Output may be to a file, in which case the name of the file is
551 given using a format string. The formatting rules are the same as
551 given using a format string. The formatting rules are the same as
552 for the export command, with the following additions:
552 for the export command, with the following additions:
553
553
554 %s basename of file being printed
554 %s basename of file being printed
555 %d dirname of file being printed, or '.' if in repository root
555 %d dirname of file being printed, or '.' if in repository root
556 %p root-relative path name of file being printed
556 %p root-relative path name of file being printed
557 """
557 """
558 ctx = repo[opts.get('rev')]
558 ctx = repo[opts.get('rev')]
559 err = 1
559 err = 1
560 m = cmdutil.match(repo, (file1,) + pats, opts)
560 m = cmdutil.match(repo, (file1,) + pats, opts)
561 for abs in ctx.walk(m):
561 for abs in ctx.walk(m):
562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
563 data = ctx[abs].data()
563 data = ctx[abs].data()
564 if opts.get('decode'):
564 if opts.get('decode'):
565 data = repo.wwritedata(abs, data)
565 data = repo.wwritedata(abs, data)
566 fp.write(data)
566 fp.write(data)
567 err = 0
567 err = 0
568 return err
568 return err
569
569
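The cat docstring above lists %s, %d and %p expansions for the --output format string; a minimal standalone sketch of that expansion (expand_output_name is an illustrative helper, not Mercurial's make_file):

import os

def expand_output_name(fmt, path):
    repl = {
        '%s': os.path.basename(path),
        '%d': os.path.dirname(path) or '.',
        '%p': path,
    }
    out = fmt
    for key, value in repl.items():
        out = out.replace(key, value)
    return out

print(expand_output_name('out/%d/%s.orig', 'src/module/file.py'))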
570 def clone(ui, source, dest=None, **opts):
570 def clone(ui, source, dest=None, **opts):
571 """make a copy of an existing repository
571 """make a copy of an existing repository
572
572
573 Create a copy of an existing repository in a new directory.
573 Create a copy of an existing repository in a new directory.
574
574
575 If no destination directory name is specified, it defaults to the
575 If no destination directory name is specified, it defaults to the
576 basename of the source.
576 basename of the source.
577
577
578 The location of the source is added to the new repository's
578 The location of the source is added to the new repository's
579 .hg/hgrc file, as the default to be used for future pulls.
579 .hg/hgrc file, as the default to be used for future pulls.
580
580
581 If you use the -r/--rev option to clone up to a specific revision,
581 If you use the -r/--rev option to clone up to a specific revision,
582 no subsequent revisions (including subsequent tags) will be
582 no subsequent revisions (including subsequent tags) will be
583 present in the cloned repository. This option implies --pull, even
583 present in the cloned repository. This option implies --pull, even
584 on local repositories.
584 on local repositories.
585
585
586 By default, clone will check out the head of the 'default' branch.
586 By default, clone will check out the head of the 'default' branch.
587 If the -U/--noupdate option is used, the new clone will contain
587 If the -U/--noupdate option is used, the new clone will contain
588 only a repository (.hg) and no working copy (the working copy
588 only a repository (.hg) and no working copy (the working copy
589 parent is the null revision).
589 parent is the null revision).
590
590
591 See 'hg help urls' for valid source format details.
591 See 'hg help urls' for valid source format details.
592
592
593 It is possible to specify an ssh:// URL as the destination, but no
593 It is possible to specify an ssh:// URL as the destination, but no
594 .hg/hgrc and working directory will be created on the remote side.
594 .hg/hgrc and working directory will be created on the remote side.
595 Look at the help text for URLs for important details about ssh://
595 Look at the help text for URLs for important details about ssh://
596 URLs.
596 URLs.
597
597
598 For efficiency, hardlinks are used for cloning whenever the source
598 For efficiency, hardlinks are used for cloning whenever the source
599 and destination are on the same filesystem (note this applies only
599 and destination are on the same filesystem (note this applies only
600 to the repository data, not to the checked out files). Some
600 to the repository data, not to the checked out files). Some
601 filesystems, such as AFS, implement hardlinking incorrectly, but
601 filesystems, such as AFS, implement hardlinking incorrectly, but
602 do not report errors. In these cases, use the --pull option to
602 do not report errors. In these cases, use the --pull option to
603 avoid hardlinking.
603 avoid hardlinking.
604
604
605 In some cases, you can clone repositories and checked out files
605 In some cases, you can clone repositories and checked out files
606 using full hardlinks with
606 using full hardlinks with
607
607
608 $ cp -al REPO REPOCLONE
608 $ cp -al REPO REPOCLONE
609
609
610 This is the fastest way to clone, but it is not always safe. The
610 This is the fastest way to clone, but it is not always safe. The
611 operation is not atomic (making sure REPO is not modified during
611 operation is not atomic (making sure REPO is not modified during
612 the operation is up to you) and you have to make sure your editor
612 the operation is up to you) and you have to make sure your editor
613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
614 this is not compatible with certain extensions that place their
614 this is not compatible with certain extensions that place their
615 metadata under the .hg directory, such as mq.
615 metadata under the .hg directory, such as mq.
616
616
617 """
617 """
618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
619 pull=opts.get('pull'),
619 pull=opts.get('pull'),
620 stream=opts.get('uncompressed'),
620 stream=opts.get('uncompressed'),
621 rev=opts.get('rev'),
621 rev=opts.get('rev'),
622 update=not opts.get('noupdate'))
622 update=not opts.get('noupdate'))
623
623
624 def commit(ui, repo, *pats, **opts):
624 def commit(ui, repo, *pats, **opts):
625 """commit the specified files or all outstanding changes
625 """commit the specified files or all outstanding changes
626
626
627 Commit changes to the given files into the repository. Unlike a
627 Commit changes to the given files into the repository. Unlike a
628 centralized RCS, this is a local operation. See hg push
628 centralized RCS, this is a local operation. See hg push
629 for means to actively distribute your changes.
629 for means to actively distribute your changes.
630
630
631 If a list of files is omitted, all changes reported by "hg status"
631 If a list of files is omitted, all changes reported by "hg status"
632 will be committed.
632 will be committed.
633
633
634 If you are committing the result of a merge, do not provide any
634 If you are committing the result of a merge, do not provide any
635 file names or -I/-X filters.
635 file names or -I/-X filters.
636
636
637 If no commit message is specified, the configured editor is
637 If no commit message is specified, the configured editor is
638 started to prompt you for a message.
638 started to prompt you for a message.
639
639
640 See 'hg help dates' for a list of formats valid for -d/--date.
640 See 'hg help dates' for a list of formats valid for -d/--date.
641 """
641 """
642 extra = {}
642 extra = {}
643 if opts.get('close_branch'):
643 if opts.get('close_branch'):
644 extra['close'] = 1
644 extra['close'] = 1
645 e = cmdutil.commiteditor
646 if opts.get('force_editor'):
647 e = cmdutil.commitforceeditor
648
645 def commitfunc(ui, repo, message, match, opts):
649 def commitfunc(ui, repo, message, match, opts):
646 return repo.commit(match.files(), message, opts.get('user'),
650 return repo.commit(match.files(), message, opts.get('user'),
647 opts.get('date'), match, force_editor=opts.get('force_editor'),
651 opts.get('date'), match, editor=e, extra=extra)
648 extra=extra)
649
652
650 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
653 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
651 if not node:
654 if not node:
652 return
655 return
653 cl = repo.changelog
656 cl = repo.changelog
654 rev = cl.rev(node)
657 rev = cl.rev(node)
655 parents = cl.parentrevs(rev)
658 parents = cl.parentrevs(rev)
656 if rev - 1 in parents:
659 if rev - 1 in parents:
657 # one of the parents was the old tip
660 # one of the parents was the old tip
658 pass
661 pass
659 elif (parents == (nullrev, nullrev) or
662 elif (parents == (nullrev, nullrev) or
660 len(cl.heads(cl.node(parents[0]))) > 1 and
663 len(cl.heads(cl.node(parents[0]))) > 1 and
661 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
664 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
662 ui.status(_('created new head\n'))
665 ui.status(_('created new head\n'))
663
666
664 if ui.debugflag:
667 if ui.debugflag:
665 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
668 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
666 elif ui.verbose:
669 elif ui.verbose:
667 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
670 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
668
671
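The hunk above is the point of this changeset: instead of forwarding a force_editor flag into repo.commit(), the commit command now selects an editor callable up front and hands it over as editor=e. A minimal sketch of that selection, using only the two cmdutil helpers named in the hunk (their exact behaviour is assumed, not shown here):

from mercurial import cmdutil

def pickeditor(opts):
    # commitforceeditor presumably always opens the editor, while commiteditor
    # only prompts when no message was supplied (assumption based on the
    # docstring above); both names come straight from the hunk.
    if opts.get('force_editor'):
        return cmdutil.commitforceeditor
    return cmdutil.commiteditor

# usage inside commit(): e = pickeditor(opts), then repo.commit(..., editor=e, extra=extra)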
669 def copy(ui, repo, *pats, **opts):
672 def copy(ui, repo, *pats, **opts):
670 """mark files as copied for the next commit
673 """mark files as copied for the next commit
671
674
672 Mark dest as having copies of source files. If dest is a
675 Mark dest as having copies of source files. If dest is a
673 directory, copies are put in that directory. If dest is a file,
676 directory, copies are put in that directory. If dest is a file,
674 the source must be a single file.
677 the source must be a single file.
675
678
676 By default, this command copies the contents of files as they
679 By default, this command copies the contents of files as they
677 stand in the working directory. If invoked with -A/--after, the
680 stand in the working directory. If invoked with -A/--after, the
678 operation is recorded, but no copying is performed.
681 operation is recorded, but no copying is performed.
679
682
680 This command takes effect with the next commit. To undo a copy
683 This command takes effect with the next commit. To undo a copy
681 before that, see hg revert.
684 before that, see hg revert.
682 """
685 """
683 wlock = repo.wlock(False)
686 wlock = repo.wlock(False)
684 try:
687 try:
685 return cmdutil.copy(ui, repo, pats, opts)
688 return cmdutil.copy(ui, repo, pats, opts)
686 finally:
689 finally:
687 wlock.release()
690 wlock.release()
688
691
689 def debugancestor(ui, repo, *args):
692 def debugancestor(ui, repo, *args):
690 """find the ancestor revision of two revisions in a given index"""
693 """find the ancestor revision of two revisions in a given index"""
691 if len(args) == 3:
694 if len(args) == 3:
692 index, rev1, rev2 = args
695 index, rev1, rev2 = args
693 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
696 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
694 lookup = r.lookup
697 lookup = r.lookup
695 elif len(args) == 2:
698 elif len(args) == 2:
696 if not repo:
699 if not repo:
697 raise util.Abort(_("There is no Mercurial repository here "
700 raise util.Abort(_("There is no Mercurial repository here "
698 "(.hg not found)"))
701 "(.hg not found)"))
699 rev1, rev2 = args
702 rev1, rev2 = args
700 r = repo.changelog
703 r = repo.changelog
701 lookup = repo.lookup
704 lookup = repo.lookup
702 else:
705 else:
703 raise util.Abort(_('either two or three arguments required'))
706 raise util.Abort(_('either two or three arguments required'))
704 a = r.ancestor(lookup(rev1), lookup(rev2))
707 a = r.ancestor(lookup(rev1), lookup(rev2))
705 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
708 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
706
709
707 def debugcommands(ui, cmd='', *args):
710 def debugcommands(ui, cmd='', *args):
708 for cmd, vals in sorted(table.iteritems()):
711 for cmd, vals in sorted(table.iteritems()):
709 cmd = cmd.split('|')[0].strip('^')
712 cmd = cmd.split('|')[0].strip('^')
710 opts = ', '.join([i[1] for i in vals[1]])
713 opts = ', '.join([i[1] for i in vals[1]])
711 ui.write('%s: %s\n' % (cmd, opts))
714 ui.write('%s: %s\n' % (cmd, opts))
712
715
713 def debugcomplete(ui, cmd='', **opts):
716 def debugcomplete(ui, cmd='', **opts):
714 """returns the completion list associated with the given command"""
717 """returns the completion list associated with the given command"""
715
718
716 if opts.get('options'):
719 if opts.get('options'):
717 options = []
720 options = []
718 otables = [globalopts]
721 otables = [globalopts]
719 if cmd:
722 if cmd:
720 aliases, entry = cmdutil.findcmd(cmd, table, False)
723 aliases, entry = cmdutil.findcmd(cmd, table, False)
721 otables.append(entry[1])
724 otables.append(entry[1])
722 for t in otables:
725 for t in otables:
723 for o in t:
726 for o in t:
724 if o[0]:
727 if o[0]:
725 options.append('-%s' % o[0])
728 options.append('-%s' % o[0])
726 options.append('--%s' % o[1])
729 options.append('--%s' % o[1])
727 ui.write("%s\n" % "\n".join(options))
730 ui.write("%s\n" % "\n".join(options))
728 return
731 return
729
732
730 cmdlist = cmdutil.findpossible(cmd, table)
733 cmdlist = cmdutil.findpossible(cmd, table)
731 if ui.verbose:
734 if ui.verbose:
732 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
735 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
733 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
736 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
734
737
735 def debugfsinfo(ui, path = "."):
738 def debugfsinfo(ui, path = "."):
736 file('.debugfsinfo', 'w').write('')
739 file('.debugfsinfo', 'w').write('')
737 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
740 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
738 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
741 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
739 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
742 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
740 and 'yes' or 'no'))
743 and 'yes' or 'no'))
741 os.unlink('.debugfsinfo')
744 os.unlink('.debugfsinfo')
742
745
743 def debugrebuildstate(ui, repo, rev="tip"):
746 def debugrebuildstate(ui, repo, rev="tip"):
744 """rebuild the dirstate as it would look like for the given revision"""
747 """rebuild the dirstate as it would look like for the given revision"""
745 ctx = repo[rev]
748 ctx = repo[rev]
746 wlock = repo.wlock()
749 wlock = repo.wlock()
747 try:
750 try:
748 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
751 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
749 finally:
752 finally:
750 wlock.release()
753 wlock.release()
751
754
752 def debugcheckstate(ui, repo):
755 def debugcheckstate(ui, repo):
753 """validate the correctness of the current dirstate"""
756 """validate the correctness of the current dirstate"""
754 parent1, parent2 = repo.dirstate.parents()
757 parent1, parent2 = repo.dirstate.parents()
755 m1 = repo[parent1].manifest()
758 m1 = repo[parent1].manifest()
756 m2 = repo[parent2].manifest()
759 m2 = repo[parent2].manifest()
757 errors = 0
760 errors = 0
758 for f in repo.dirstate:
761 for f in repo.dirstate:
759 state = repo.dirstate[f]
762 state = repo.dirstate[f]
760 if state in "nr" and f not in m1:
763 if state in "nr" and f not in m1:
761 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
764 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
762 errors += 1
765 errors += 1
763 if state in "a" and f in m1:
766 if state in "a" and f in m1:
764 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
767 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
765 errors += 1
768 errors += 1
766 if state in "m" and f not in m1 and f not in m2:
769 if state in "m" and f not in m1 and f not in m2:
767 ui.warn(_("%s in state %s, but not in either manifest\n") %
770 ui.warn(_("%s in state %s, but not in either manifest\n") %
768 (f, state))
771 (f, state))
769 errors += 1
772 errors += 1
770 for f in m1:
773 for f in m1:
771 state = repo.dirstate[f]
774 state = repo.dirstate[f]
772 if state not in "nrm":
775 if state not in "nrm":
773 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
776 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
774 errors += 1
777 errors += 1
775 if errors:
778 if errors:
776 error = _(".hg/dirstate inconsistent with current parent's manifest")
779 error = _(".hg/dirstate inconsistent with current parent's manifest")
777 raise util.Abort(error)
780 raise util.Abort(error)
778
781
779 def showconfig(ui, repo, *values, **opts):
782 def showconfig(ui, repo, *values, **opts):
780 """show combined config settings from all hgrc files
783 """show combined config settings from all hgrc files
781
784
782 With no args, print names and values of all config items.
785 With no args, print names and values of all config items.
783
786
784 With one arg of the form section.name, print just the value of
787 With one arg of the form section.name, print just the value of
785 that config item.
788 that config item.
786
789
787 With multiple args, print names and values of all config items
790 With multiple args, print names and values of all config items
788 with matching section names.
791 with matching section names.
789
792
790 With the --debug flag, the source (filename and line number) is
793 With the --debug flag, the source (filename and line number) is
791 printed for each config item.
794 printed for each config item.
792 """
795 """
793
796
794 untrusted = bool(opts.get('untrusted'))
797 untrusted = bool(opts.get('untrusted'))
795 if values:
798 if values:
796 if len([v for v in values if '.' in v]) > 1:
799 if len([v for v in values if '.' in v]) > 1:
797 raise util.Abort(_('only one config item permitted'))
800 raise util.Abort(_('only one config item permitted'))
798 for section, name, value in ui.walkconfig(untrusted=untrusted):
801 for section, name, value in ui.walkconfig(untrusted=untrusted):
799 sectname = section + '.' + name
802 sectname = section + '.' + name
800 if values:
803 if values:
801 for v in values:
804 for v in values:
802 if v == section:
805 if v == section:
803 ui.debug('%s: ' %
806 ui.debug('%s: ' %
804 ui.configsource(section, name, untrusted))
807 ui.configsource(section, name, untrusted))
805 ui.write('%s=%s\n' % (sectname, value))
808 ui.write('%s=%s\n' % (sectname, value))
806 elif v == sectname:
809 elif v == sectname:
807 ui.debug('%s: ' %
810 ui.debug('%s: ' %
808 ui.configsource(section, name, untrusted))
811 ui.configsource(section, name, untrusted))
809 ui.write(value, '\n')
812 ui.write(value, '\n')
810 else:
813 else:
811 ui.debug('%s: ' %
814 ui.debug('%s: ' %
812 ui.configsource(section, name, untrusted))
815 ui.configsource(section, name, untrusted))
813 ui.write('%s=%s\n' % (sectname, value))
816 ui.write('%s=%s\n' % (sectname, value))
814
817
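A standalone sketch of the argument matching described in the showconfig docstring and implemented by the loop above, with a plain dict standing in for ui.walkconfig (the dict and the helper name are illustrative only):

def filterconfig(items, values):
    # items maps 'section.name' to a value; values are the command arguments
    for sectname, value in sorted(items.items()):
        section = sectname.split('.', 1)[0]
        if not values:
            yield '%s=%s' % (sectname, value)   # no args: everything
        elif section in values:
            yield '%s=%s' % (sectname, value)   # bare section: names and values
        elif sectname in values:
            yield value                         # section.name: just the value

# list(filterconfig({'ui.username': 'alice', 'ui.verbose': 'true'}, ['ui.username']))
# -> ['alice']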
815 def debugsetparents(ui, repo, rev1, rev2=None):
818 def debugsetparents(ui, repo, rev1, rev2=None):
816 """manually set the parents of the current working directory
819 """manually set the parents of the current working directory
817
820
818 This is useful for writing repository conversion tools, but should
821 This is useful for writing repository conversion tools, but should
819 be used with care.
822 be used with care.
820 """
823 """
821
824
822 if not rev2:
825 if not rev2:
823 rev2 = hex(nullid)
826 rev2 = hex(nullid)
824
827
825 wlock = repo.wlock()
828 wlock = repo.wlock()
826 try:
829 try:
827 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
830 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
828 finally:
831 finally:
829 wlock.release()
832 wlock.release()
830
833
831 def debugstate(ui, repo, nodates=None):
834 def debugstate(ui, repo, nodates=None):
832 """show the contents of the current dirstate"""
835 """show the contents of the current dirstate"""
833 timestr = ""
836 timestr = ""
834 showdate = not nodates
837 showdate = not nodates
835 for file_, ent in sorted(repo.dirstate._map.iteritems()):
838 for file_, ent in sorted(repo.dirstate._map.iteritems()):
836 if showdate:
839 if showdate:
837 if ent[3] == -1:
840 if ent[3] == -1:
838 # Pad or slice to locale representation
841 # Pad or slice to locale representation
839 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
842 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
840 timestr = 'unset'
843 timestr = 'unset'
841 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
844 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
842 else:
845 else:
843 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
846 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
844 if ent[1] & 020000:
847 if ent[1] & 020000:
845 mode = 'lnk'
848 mode = 'lnk'
846 else:
849 else:
847 mode = '%3o' % (ent[1] & 0777)
850 mode = '%3o' % (ent[1] & 0777)
848 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
851 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
849 for f in repo.dirstate.copies():
852 for f in repo.dirstate.copies():
850 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
853 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
851
854
852 def debugdata(ui, file_, rev):
855 def debugdata(ui, file_, rev):
853 """dump the contents of a data file revision"""
856 """dump the contents of a data file revision"""
854 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
857 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
855 try:
858 try:
856 ui.write(r.revision(r.lookup(rev)))
859 ui.write(r.revision(r.lookup(rev)))
857 except KeyError:
860 except KeyError:
858 raise util.Abort(_('invalid revision identifier %s') % rev)
861 raise util.Abort(_('invalid revision identifier %s') % rev)
859
862
860 def debugdate(ui, date, range=None, **opts):
863 def debugdate(ui, date, range=None, **opts):
861 """parse and display a date"""
864 """parse and display a date"""
862 if opts["extended"]:
865 if opts["extended"]:
863 d = util.parsedate(date, util.extendeddateformats)
866 d = util.parsedate(date, util.extendeddateformats)
864 else:
867 else:
865 d = util.parsedate(date)
868 d = util.parsedate(date)
866 ui.write("internal: %s %s\n" % d)
869 ui.write("internal: %s %s\n" % d)
867 ui.write("standard: %s\n" % util.datestr(d))
870 ui.write("standard: %s\n" % util.datestr(d))
868 if range:
871 if range:
869 m = util.matchdate(range)
872 m = util.matchdate(range)
870 ui.write("match: %s\n" % m(d[0]))
873 ui.write("match: %s\n" % m(d[0]))
871
874
872 def debugindex(ui, file_):
875 def debugindex(ui, file_):
873 """dump the contents of an index file"""
876 """dump the contents of an index file"""
874 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
877 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
875 ui.write(" rev offset length base linkrev"
878 ui.write(" rev offset length base linkrev"
876 " nodeid p1 p2\n")
879 " nodeid p1 p2\n")
877 for i in r:
880 for i in r:
878 node = r.node(i)
881 node = r.node(i)
879 try:
882 try:
880 pp = r.parents(node)
883 pp = r.parents(node)
881 except:
884 except:
882 pp = [nullid, nullid]
885 pp = [nullid, nullid]
883 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
886 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
884 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
887 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
885 short(node), short(pp[0]), short(pp[1])))
888 short(node), short(pp[0]), short(pp[1])))
886
889
887 def debugindexdot(ui, file_):
890 def debugindexdot(ui, file_):
888 """dump an index DAG as a .dot file"""
891 """dump an index DAG as a .dot file"""
889 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
892 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
890 ui.write("digraph G {\n")
893 ui.write("digraph G {\n")
891 for i in r:
894 for i in r:
892 node = r.node(i)
895 node = r.node(i)
893 pp = r.parents(node)
896 pp = r.parents(node)
894 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
897 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
895 if pp[1] != nullid:
898 if pp[1] != nullid:
896 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
899 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
897 ui.write("}\n")
900 ui.write("}\n")
898
901
899 def debuginstall(ui):
902 def debuginstall(ui):
900 '''test Mercurial installation'''
903 '''test Mercurial installation'''
901
904
902 def writetemp(contents):
905 def writetemp(contents):
903 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
906 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
904 f = os.fdopen(fd, "wb")
907 f = os.fdopen(fd, "wb")
905 f.write(contents)
908 f.write(contents)
906 f.close()
909 f.close()
907 return name
910 return name
908
911
909 problems = 0
912 problems = 0
910
913
911 # encoding
914 # encoding
912 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
915 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
913 try:
916 try:
914 encoding.fromlocal("test")
917 encoding.fromlocal("test")
915 except util.Abort, inst:
918 except util.Abort, inst:
916 ui.write(" %s\n" % inst)
919 ui.write(" %s\n" % inst)
917 ui.write(_(" (check that your locale is properly set)\n"))
920 ui.write(_(" (check that your locale is properly set)\n"))
918 problems += 1
921 problems += 1
919
922
920 # compiled modules
923 # compiled modules
921 ui.status(_("Checking extensions...\n"))
924 ui.status(_("Checking extensions...\n"))
922 try:
925 try:
923 import bdiff, mpatch, base85
926 import bdiff, mpatch, base85
924 except Exception, inst:
927 except Exception, inst:
925 ui.write(" %s\n" % inst)
928 ui.write(" %s\n" % inst)
926 ui.write(_(" One or more extensions could not be found"))
929 ui.write(_(" One or more extensions could not be found"))
927 ui.write(_(" (check that you compiled the extensions)\n"))
930 ui.write(_(" (check that you compiled the extensions)\n"))
928 problems += 1
931 problems += 1
929
932
930 # templates
933 # templates
931 ui.status(_("Checking templates...\n"))
934 ui.status(_("Checking templates...\n"))
932 try:
935 try:
933 import templater
936 import templater
934 templater.templater(templater.templatepath("map-cmdline.default"))
937 templater.templater(templater.templatepath("map-cmdline.default"))
935 except Exception, inst:
938 except Exception, inst:
936 ui.write(" %s\n" % inst)
939 ui.write(" %s\n" % inst)
937 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
940 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
938 problems += 1
941 problems += 1
939
942
940 # patch
943 # patch
941 ui.status(_("Checking patch...\n"))
944 ui.status(_("Checking patch...\n"))
942 patchproblems = 0
945 patchproblems = 0
943 a = "1\n2\n3\n4\n"
946 a = "1\n2\n3\n4\n"
944 b = "1\n2\n3\ninsert\n4\n"
947 b = "1\n2\n3\ninsert\n4\n"
945 fa = writetemp(a)
948 fa = writetemp(a)
946 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
949 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
947 os.path.basename(fa))
950 os.path.basename(fa))
948 fd = writetemp(d)
951 fd = writetemp(d)
949
952
950 files = {}
953 files = {}
951 try:
954 try:
952 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
955 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
953 except util.Abort, e:
956 except util.Abort, e:
954 ui.write(_(" patch call failed:\n"))
957 ui.write(_(" patch call failed:\n"))
955 ui.write(" " + str(e) + "\n")
958 ui.write(" " + str(e) + "\n")
956 patchproblems += 1
959 patchproblems += 1
957 else:
960 else:
958 if list(files) != [os.path.basename(fa)]:
961 if list(files) != [os.path.basename(fa)]:
959 ui.write(_(" unexpected patch output!\n"))
962 ui.write(_(" unexpected patch output!\n"))
960 patchproblems += 1
963 patchproblems += 1
961 a = file(fa).read()
964 a = file(fa).read()
962 if a != b:
965 if a != b:
963 ui.write(_(" patch test failed!\n"))
966 ui.write(_(" patch test failed!\n"))
964 patchproblems += 1
967 patchproblems += 1
965
968
966 if patchproblems:
969 if patchproblems:
967 if ui.config('ui', 'patch'):
970 if ui.config('ui', 'patch'):
968 ui.write(_(" (Current patch tool may be incompatible with patch,"
971 ui.write(_(" (Current patch tool may be incompatible with patch,"
969 " or misconfigured. Please check your .hgrc file)\n"))
972 " or misconfigured. Please check your .hgrc file)\n"))
970 else:
973 else:
971 ui.write(_(" Internal patcher failure, please report this error"
974 ui.write(_(" Internal patcher failure, please report this error"
972 " to http://www.selenic.com/mercurial/bts\n"))
975 " to http://www.selenic.com/mercurial/bts\n"))
973 problems += patchproblems
976 problems += patchproblems
974
977
975 os.unlink(fa)
978 os.unlink(fa)
976 os.unlink(fd)
979 os.unlink(fd)
977
980
978 # editor
981 # editor
979 ui.status(_("Checking commit editor...\n"))
982 ui.status(_("Checking commit editor...\n"))
980 editor = ui.geteditor()
983 editor = ui.geteditor()
981 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
984 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
982 if not cmdpath:
985 if not cmdpath:
983 if editor == 'vi':
986 if editor == 'vi':
984 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
987 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
985 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
988 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
986 else:
989 else:
987 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
990 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
988 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
991 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
989 problems += 1
992 problems += 1
990
993
991 # check username
994 # check username
992 ui.status(_("Checking username...\n"))
995 ui.status(_("Checking username...\n"))
993 user = os.environ.get("HGUSER")
996 user = os.environ.get("HGUSER")
994 if user is None:
997 if user is None:
995 user = ui.config("ui", "username")
998 user = ui.config("ui", "username")
996 if user is None:
999 if user is None:
997 user = os.environ.get("EMAIL")
1000 user = os.environ.get("EMAIL")
998 if not user:
1001 if not user:
999 ui.warn(" ")
1002 ui.warn(" ")
1000 ui.username()
1003 ui.username()
1001 ui.write(_(" (specify a username in your .hgrc file)\n"))
1004 ui.write(_(" (specify a username in your .hgrc file)\n"))
1002
1005
1003 if not problems:
1006 if not problems:
1004 ui.status(_("No problems detected\n"))
1007 ui.status(_("No problems detected\n"))
1005 else:
1008 else:
1006 ui.write(_("%s problems detected,"
1009 ui.write(_("%s problems detected,"
1007 " please check your install!\n") % problems)
1010 " please check your install!\n") % problems)
1008
1011
1009 return problems
1012 return problems
1010
1013
1011 def debugrename(ui, repo, file1, *pats, **opts):
1014 def debugrename(ui, repo, file1, *pats, **opts):
1012 """dump rename information"""
1015 """dump rename information"""
1013
1016
1014 ctx = repo[opts.get('rev')]
1017 ctx = repo[opts.get('rev')]
1015 m = cmdutil.match(repo, (file1,) + pats, opts)
1018 m = cmdutil.match(repo, (file1,) + pats, opts)
1016 for abs in ctx.walk(m):
1019 for abs in ctx.walk(m):
1017 fctx = ctx[abs]
1020 fctx = ctx[abs]
1018 o = fctx.filelog().renamed(fctx.filenode())
1021 o = fctx.filelog().renamed(fctx.filenode())
1019 rel = m.rel(abs)
1022 rel = m.rel(abs)
1020 if o:
1023 if o:
1021 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1024 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1022 else:
1025 else:
1023 ui.write(_("%s not renamed\n") % rel)
1026 ui.write(_("%s not renamed\n") % rel)
1024
1027
1025 def debugwalk(ui, repo, *pats, **opts):
1028 def debugwalk(ui, repo, *pats, **opts):
1026 """show how files match on given patterns"""
1029 """show how files match on given patterns"""
1027 m = cmdutil.match(repo, pats, opts)
1030 m = cmdutil.match(repo, pats, opts)
1028 items = list(repo.walk(m))
1031 items = list(repo.walk(m))
1029 if not items:
1032 if not items:
1030 return
1033 return
1031 fmt = 'f %%-%ds %%-%ds %%s' % (
1034 fmt = 'f %%-%ds %%-%ds %%s' % (
1032 max([len(abs) for abs in items]),
1035 max([len(abs) for abs in items]),
1033 max([len(m.rel(abs)) for abs in items]))
1036 max([len(m.rel(abs)) for abs in items]))
1034 for abs in items:
1037 for abs in items:
1035 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1038 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1036 ui.write("%s\n" % line.rstrip())
1039 ui.write("%s\n" % line.rstrip())
1037
1040
1038 def diff(ui, repo, *pats, **opts):
1041 def diff(ui, repo, *pats, **opts):
1039 """diff repository (or selected files)
1042 """diff repository (or selected files)
1040
1043
1041 Show differences between revisions for the specified files.
1044 Show differences between revisions for the specified files.
1042
1045
1043 Differences between files are shown using the unified diff format.
1046 Differences between files are shown using the unified diff format.
1044
1047
1045 NOTE: diff may generate unexpected results for merges, as it will
1048 NOTE: diff may generate unexpected results for merges, as it will
1046 default to comparing against the working directory's first parent
1049 default to comparing against the working directory's first parent
1047 changeset if no revisions are specified.
1050 changeset if no revisions are specified.
1048
1051
1049 When two revision arguments are given, then changes are shown
1052 When two revision arguments are given, then changes are shown
1050 between those revisions. If only one revision is specified then
1053 between those revisions. If only one revision is specified then
1051 that revision is compared to the working directory, and, when no
1054 that revision is compared to the working directory, and, when no
1052 revisions are specified, the working directory files are compared
1055 revisions are specified, the working directory files are compared
1053 to the working directory's parent.
1056 to the working directory's parent.
1054
1057
1055 Without the -a/--text option, diff will avoid generating diffs of
1058 Without the -a/--text option, diff will avoid generating diffs of
1056 files it detects as binary. With -a, diff will generate a diff
1059 files it detects as binary. With -a, diff will generate a diff
1057 anyway, probably with undesirable results.
1060 anyway, probably with undesirable results.
1058
1061
1059 Use the -g/--git option to generate diffs in the git extended diff
1062 Use the -g/--git option to generate diffs in the git extended diff
1060 format. For more information, read 'hg help diffs'.
1063 format. For more information, read 'hg help diffs'.
1061 """
1064 """
1062
1065
1063 revs = opts.get('rev')
1066 revs = opts.get('rev')
1064 change = opts.get('change')
1067 change = opts.get('change')
1065
1068
1066 if revs and change:
1069 if revs and change:
1067 msg = _('cannot specify --rev and --change at the same time')
1070 msg = _('cannot specify --rev and --change at the same time')
1068 raise util.Abort(msg)
1071 raise util.Abort(msg)
1069 elif change:
1072 elif change:
1070 node2 = repo.lookup(change)
1073 node2 = repo.lookup(change)
1071 node1 = repo[node2].parents()[0].node()
1074 node1 = repo[node2].parents()[0].node()
1072 else:
1075 else:
1073 node1, node2 = cmdutil.revpair(repo, revs)
1076 node1, node2 = cmdutil.revpair(repo, revs)
1074
1077
1075 m = cmdutil.match(repo, pats, opts)
1078 m = cmdutil.match(repo, pats, opts)
1076 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1079 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1077 for chunk in it:
1080 for chunk in it:
1078 repo.ui.write(chunk)
1081 repo.ui.write(chunk)
1079
1082
1080 def export(ui, repo, *changesets, **opts):
1083 def export(ui, repo, *changesets, **opts):
1081 """dump the header and diffs for one or more changesets
1084 """dump the header and diffs for one or more changesets
1082
1085
1083 Print the changeset header and diffs for one or more revisions.
1086 Print the changeset header and diffs for one or more revisions.
1084
1087
1085 The information shown in the changeset header is: author,
1088 The information shown in the changeset header is: author,
1086 changeset hash, parent(s) and commit comment.
1089 changeset hash, parent(s) and commit comment.
1087
1090
1088 NOTE: export may generate unexpected diff output for merge
1091 NOTE: export may generate unexpected diff output for merge
1089 changesets, as it will compare the merge changeset against its
1092 changesets, as it will compare the merge changeset against its
1090 first parent only.
1093 first parent only.
1091
1094
1092 Output may be to a file, in which case the name of the file is
1095 Output may be to a file, in which case the name of the file is
1093 given using a format string. The formatting rules are as follows:
1096 given using a format string. The formatting rules are as follows:
1094
1097
1095 %% literal "%" character
1098 %% literal "%" character
1096 %H changeset hash (40 bytes of hexadecimal)
1099 %H changeset hash (40 bytes of hexadecimal)
1097 %N number of patches being generated
1100 %N number of patches being generated
1098 %R changeset revision number
1101 %R changeset revision number
1099 %b basename of the exporting repository
1102 %b basename of the exporting repository
1100 %h short-form changeset hash (12 bytes of hexadecimal)
1103 %h short-form changeset hash (12 bytes of hexadecimal)
1101 %n zero-padded sequence number, starting at 1
1104 %n zero-padded sequence number, starting at 1
1102 %r zero-padded changeset revision number
1105 %r zero-padded changeset revision number
1103
1106
1104 Without the -a/--text option, export will avoid generating diffs
1107 Without the -a/--text option, export will avoid generating diffs
1105 of files it detects as binary. With -a, export will generate a
1108 of files it detects as binary. With -a, export will generate a
1106 diff anyway, probably with undesirable results.
1109 diff anyway, probably with undesirable results.
1107
1110
1108 Use the -g/--git option to generate diffs in the git extended diff
1111 Use the -g/--git option to generate diffs in the git extended diff
1109 format. Read the diffs help topic for more information.
1112 format. Read the diffs help topic for more information.
1110
1113
1111 With the --switch-parent option, the diff will be against the
1114 With the --switch-parent option, the diff will be against the
1112 second parent. This can be useful for reviewing a merge.
1115 second parent. This can be useful for reviewing a merge.
1113 """
1116 """
1114 if not changesets:
1117 if not changesets:
1115 raise util.Abort(_("export requires at least one changeset"))
1118 raise util.Abort(_("export requires at least one changeset"))
1116 revs = cmdutil.revrange(repo, changesets)
1119 revs = cmdutil.revrange(repo, changesets)
1117 if len(revs) > 1:
1120 if len(revs) > 1:
1118 ui.note(_('exporting patches:\n'))
1121 ui.note(_('exporting patches:\n'))
1119 else:
1122 else:
1120 ui.note(_('exporting patch:\n'))
1123 ui.note(_('exporting patch:\n'))
1121 patch.export(repo, revs, template=opts.get('output'),
1124 patch.export(repo, revs, template=opts.get('output'),
1122 switch_parent=opts.get('switch_parent'),
1125 switch_parent=opts.get('switch_parent'),
1123 opts=patch.diffopts(ui, opts))
1126 opts=patch.diffopts(ui, opts))
1124
1127
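The placeholder table in the export docstring can be read as a simple character substitution. The sketch below is not Mercurial's own expansion code; the zero-padding width in particular is an assumption (the docstring only says "zero-padded"):

def expandname(template, node, rev, seqno, total, repobase):
    width = len(str(total))                 # assumed padding width
    subs = {'%': '%',
            'H': node,                      # full 40-hex hash
            'h': node[:12],                 # short hash
            'R': str(rev),
            'r': str(rev).zfill(width),
            'b': repobase,
            'n': str(seqno).zfill(width),
            'N': str(total)}
    out, i = [], 0
    while i < len(template):
        if template[i] == '%' and i + 1 < len(template):
            out.append(subs.get(template[i + 1], '%' + template[i + 1]))
            i += 2
        else:
            out.append(template[i])
            i += 1
    return ''.join(out)

# expandname('%b-%n-of-%N.patch', 'f' * 40, 1200, 2, 5, 'hg') -> 'hg-2-of-5.patch'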
1125 def grep(ui, repo, pattern, *pats, **opts):
1128 def grep(ui, repo, pattern, *pats, **opts):
1126 """search for a pattern in specified files and revisions
1129 """search for a pattern in specified files and revisions
1127
1130
1128 Search revisions of files for a regular expression.
1131 Search revisions of files for a regular expression.
1129
1132
1130 This command behaves differently than Unix grep. It only accepts
1133 This command behaves differently than Unix grep. It only accepts
1131 Python/Perl regexps. It searches repository history, not the
1134 Python/Perl regexps. It searches repository history, not the
1132 working directory. It always prints the revision number in which a
1135 working directory. It always prints the revision number in which a
1133 match appears.
1136 match appears.
1134
1137
1135 By default, grep only prints output for the first revision of a
1138 By default, grep only prints output for the first revision of a
1136 file in which it finds a match. To get it to print every revision
1139 file in which it finds a match. To get it to print every revision
1137 that contains a change in match status ("-" for a match that
1140 that contains a change in match status ("-" for a match that
1138 becomes a non-match, or "+" for a non-match that becomes a match),
1141 becomes a non-match, or "+" for a non-match that becomes a match),
1139 use the --all flag.
1142 use the --all flag.
1140 """
1143 """
1141 reflags = 0
1144 reflags = 0
1142 if opts.get('ignore_case'):
1145 if opts.get('ignore_case'):
1143 reflags |= re.I
1146 reflags |= re.I
1144 try:
1147 try:
1145 regexp = re.compile(pattern, reflags)
1148 regexp = re.compile(pattern, reflags)
1146 except Exception, inst:
1149 except Exception, inst:
1147 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1150 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1148 return None
1151 return None
1149 sep, eol = ':', '\n'
1152 sep, eol = ':', '\n'
1150 if opts.get('print0'):
1153 if opts.get('print0'):
1151 sep = eol = '\0'
1154 sep = eol = '\0'
1152
1155
1153 fcache = {}
1156 fcache = {}
1154 def getfile(fn):
1157 def getfile(fn):
1155 if fn not in fcache:
1158 if fn not in fcache:
1156 fcache[fn] = repo.file(fn)
1159 fcache[fn] = repo.file(fn)
1157 return fcache[fn]
1160 return fcache[fn]
1158
1161
1159 def matchlines(body):
1162 def matchlines(body):
1160 begin = 0
1163 begin = 0
1161 linenum = 0
1164 linenum = 0
1162 while True:
1165 while True:
1163 match = regexp.search(body, begin)
1166 match = regexp.search(body, begin)
1164 if not match:
1167 if not match:
1165 break
1168 break
1166 mstart, mend = match.span()
1169 mstart, mend = match.span()
1167 linenum += body.count('\n', begin, mstart) + 1
1170 linenum += body.count('\n', begin, mstart) + 1
1168 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1171 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1169 begin = body.find('\n', mend) + 1 or len(body)
1172 begin = body.find('\n', mend) + 1 or len(body)
1170 lend = begin - 1
1173 lend = begin - 1
1171 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1174 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1172
1175
1173 class linestate(object):
1176 class linestate(object):
1174 def __init__(self, line, linenum, colstart, colend):
1177 def __init__(self, line, linenum, colstart, colend):
1175 self.line = line
1178 self.line = line
1176 self.linenum = linenum
1179 self.linenum = linenum
1177 self.colstart = colstart
1180 self.colstart = colstart
1178 self.colend = colend
1181 self.colend = colend
1179
1182
1180 def __hash__(self):
1183 def __hash__(self):
1181 return hash((self.linenum, self.line))
1184 return hash((self.linenum, self.line))
1182
1185
1183 def __eq__(self, other):
1186 def __eq__(self, other):
1184 return self.line == other.line
1187 return self.line == other.line
1185
1188
1186 matches = {}
1189 matches = {}
1187 copies = {}
1190 copies = {}
1188 def grepbody(fn, rev, body):
1191 def grepbody(fn, rev, body):
1189 matches[rev].setdefault(fn, [])
1192 matches[rev].setdefault(fn, [])
1190 m = matches[rev][fn]
1193 m = matches[rev][fn]
1191 for lnum, cstart, cend, line in matchlines(body):
1194 for lnum, cstart, cend, line in matchlines(body):
1192 s = linestate(line, lnum, cstart, cend)
1195 s = linestate(line, lnum, cstart, cend)
1193 m.append(s)
1196 m.append(s)
1194
1197
1195 def difflinestates(a, b):
1198 def difflinestates(a, b):
1196 sm = difflib.SequenceMatcher(None, a, b)
1199 sm = difflib.SequenceMatcher(None, a, b)
1197 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1200 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1198 if tag == 'insert':
1201 if tag == 'insert':
1199 for i in xrange(blo, bhi):
1202 for i in xrange(blo, bhi):
1200 yield ('+', b[i])
1203 yield ('+', b[i])
1201 elif tag == 'delete':
1204 elif tag == 'delete':
1202 for i in xrange(alo, ahi):
1205 for i in xrange(alo, ahi):
1203 yield ('-', a[i])
1206 yield ('-', a[i])
1204 elif tag == 'replace':
1207 elif tag == 'replace':
1205 for i in xrange(alo, ahi):
1208 for i in xrange(alo, ahi):
1206 yield ('-', a[i])
1209 yield ('-', a[i])
1207 for i in xrange(blo, bhi):
1210 for i in xrange(blo, bhi):
1208 yield ('+', b[i])
1211 yield ('+', b[i])
1209
1212
1210 prev = {}
1213 prev = {}
1211 def display(fn, rev, states, prevstates):
1214 def display(fn, rev, states, prevstates):
1212 datefunc = ui.quiet and util.shortdate or util.datestr
1215 datefunc = ui.quiet and util.shortdate or util.datestr
1213 found = False
1216 found = False
1214 filerevmatches = {}
1217 filerevmatches = {}
1215 r = prev.get(fn, -1)
1218 r = prev.get(fn, -1)
1216 if opts.get('all'):
1219 if opts.get('all'):
1217 iter = difflinestates(states, prevstates)
1220 iter = difflinestates(states, prevstates)
1218 else:
1221 else:
1219 iter = [('', l) for l in prevstates]
1222 iter = [('', l) for l in prevstates]
1220 for change, l in iter:
1223 for change, l in iter:
1221 cols = [fn, str(r)]
1224 cols = [fn, str(r)]
1222 if opts.get('line_number'):
1225 if opts.get('line_number'):
1223 cols.append(str(l.linenum))
1226 cols.append(str(l.linenum))
1224 if opts.get('all'):
1227 if opts.get('all'):
1225 cols.append(change)
1228 cols.append(change)
1226 if opts.get('user'):
1229 if opts.get('user'):
1227 cols.append(ui.shortuser(get(r)[1]))
1230 cols.append(ui.shortuser(get(r)[1]))
1228 if opts.get('date'):
1231 if opts.get('date'):
1229 cols.append(datefunc(get(r)[2]))
1232 cols.append(datefunc(get(r)[2]))
1230 if opts.get('files_with_matches'):
1233 if opts.get('files_with_matches'):
1231 c = (fn, r)
1234 c = (fn, r)
1232 if c in filerevmatches:
1235 if c in filerevmatches:
1233 continue
1236 continue
1234 filerevmatches[c] = 1
1237 filerevmatches[c] = 1
1235 else:
1238 else:
1236 cols.append(l.line)
1239 cols.append(l.line)
1237 ui.write(sep.join(cols), eol)
1240 ui.write(sep.join(cols), eol)
1238 found = True
1241 found = True
1239 return found
1242 return found
1240
1243
1241 fstate = {}
1244 fstate = {}
1242 skip = {}
1245 skip = {}
1243 get = util.cachefunc(lambda r: repo[r].changeset())
1246 get = util.cachefunc(lambda r: repo[r].changeset())
1244 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1247 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1245 found = False
1248 found = False
1246 follow = opts.get('follow')
1249 follow = opts.get('follow')
1247 for st, rev, fns in changeiter:
1250 for st, rev, fns in changeiter:
1248 if st == 'window':
1251 if st == 'window':
1249 matches.clear()
1252 matches.clear()
1250 elif st == 'add':
1253 elif st == 'add':
1251 ctx = repo[rev]
1254 ctx = repo[rev]
1252 matches[rev] = {}
1255 matches[rev] = {}
1253 for fn in fns:
1256 for fn in fns:
1254 if fn in skip:
1257 if fn in skip:
1255 continue
1258 continue
1256 try:
1259 try:
1257 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1260 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1258 fstate.setdefault(fn, [])
1261 fstate.setdefault(fn, [])
1259 if follow:
1262 if follow:
1260 copied = getfile(fn).renamed(ctx.filenode(fn))
1263 copied = getfile(fn).renamed(ctx.filenode(fn))
1261 if copied:
1264 if copied:
1262 copies.setdefault(rev, {})[fn] = copied[0]
1265 copies.setdefault(rev, {})[fn] = copied[0]
1263 except error.LookupError:
1266 except error.LookupError:
1264 pass
1267 pass
1265 elif st == 'iter':
1268 elif st == 'iter':
1266 for fn, m in sorted(matches[rev].items()):
1269 for fn, m in sorted(matches[rev].items()):
1267 copy = copies.get(rev, {}).get(fn)
1270 copy = copies.get(rev, {}).get(fn)
1268 if fn in skip:
1271 if fn in skip:
1269 if copy:
1272 if copy:
1270 skip[copy] = True
1273 skip[copy] = True
1271 continue
1274 continue
1272 if fn in prev or fstate[fn]:
1275 if fn in prev or fstate[fn]:
1273 r = display(fn, rev, m, fstate[fn])
1276 r = display(fn, rev, m, fstate[fn])
1274 found = found or r
1277 found = found or r
1275 if r and not opts.get('all'):
1278 if r and not opts.get('all'):
1276 skip[fn] = True
1279 skip[fn] = True
1277 if copy:
1280 if copy:
1278 skip[copy] = True
1281 skip[copy] = True
1279 fstate[fn] = m
1282 fstate[fn] = m
1280 if copy:
1283 if copy:
1281 fstate[copy] = m
1284 fstate[copy] = m
1282 prev[fn] = rev
1285 prev[fn] = rev
1283
1286
1284 for fn, state in sorted(fstate.items()):
1287 for fn, state in sorted(fstate.items()):
1285 if fn in skip:
1288 if fn in skip:
1286 continue
1289 continue
1287 if fn not in copies.get(prev[fn], {}):
1290 if fn not in copies.get(prev[fn], {}):
1288 found = display(fn, rev, {}, state) or found
1291 found = display(fn, rev, {}, state) or found
1289 return (not found and 1) or 0
1292 return (not found and 1) or 0
1290
1293
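The "+"/"-" markers that the grep docstring describes for --all are produced by difflinestates() above, which walks SequenceMatcher opcodes over two lists of matching lines. A self-contained illustration of the same walk (the function name and sample data are made up):

import difflib

def diffstates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in a[alo:ahi]:
                yield ('-', line)   # present only in the first list
        if tag in ('insert', 'replace'):
            for line in b[blo:bhi]:
                yield ('+', line)   # present only in the second list

# list(diffstates(['foo()'], ['foo()', 'foo(bar)'])) -> [('+', 'foo(bar)')]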
1291 def heads(ui, repo, *branchrevs, **opts):
1294 def heads(ui, repo, *branchrevs, **opts):
1292 """show current repository heads or show branch heads
1295 """show current repository heads or show branch heads
1293
1296
1294 With no arguments, show all repository head changesets.
1297 With no arguments, show all repository head changesets.
1295
1298
1296 If branch or revision names are given, this will show the heads of
1299 If branch or revision names are given, this will show the heads of
1297 the specified branches or of the branches those revisions are tagged
1300 the specified branches or of the branches those revisions are tagged
1298 with.
1301 with.
1299
1302
1300 Repository "heads" are changesets that don't have child
1303 Repository "heads" are changesets that don't have child
1301 changesets. They are where development generally takes place and
1304 changesets. They are where development generally takes place and
1302 are the usual targets for update and merge operations.
1305 are the usual targets for update and merge operations.
1303
1306
1304 Branch heads are changesets that have a given branch tag, but have
1307 Branch heads are changesets that have a given branch tag, but have
1305 no child changesets with that tag. They are usually where
1308 no child changesets with that tag. They are usually where
1306 development on the given branch takes place.
1309 development on the given branch takes place.
1307 """
1310 """
1308 if opts.get('rev'):
1311 if opts.get('rev'):
1309 start = repo.lookup(opts['rev'])
1312 start = repo.lookup(opts['rev'])
1310 else:
1313 else:
1311 start = None
1314 start = None
1312 closed = not opts.get('active')
1315 closed = not opts.get('active')
1313 if not branchrevs:
1316 if not branchrevs:
1314 # Assume we're looking repo-wide heads if no revs were specified.
1317 # Assume we're looking repo-wide heads if no revs were specified.
1315 heads = repo.heads(start, closed=closed)
1318 heads = repo.heads(start, closed=closed)
1316 else:
1319 else:
1317 heads = []
1320 heads = []
1318 visitedset = set()
1321 visitedset = set()
1319 for branchrev in branchrevs:
1322 for branchrev in branchrevs:
1320 branch = repo[branchrev].branch()
1323 branch = repo[branchrev].branch()
1321 if branch in visitedset:
1324 if branch in visitedset:
1322 continue
1325 continue
1323 visitedset.add(branch)
1326 visitedset.add(branch)
1324 bheads = repo.branchheads(branch, start, closed=closed)
1327 bheads = repo.branchheads(branch, start, closed=closed)
1325 if not bheads:
1328 if not bheads:
1326 if branch != branchrev:
1329 if branch != branchrev:
1327 ui.warn(_("no changes on branch %s containing %s are "
1330 ui.warn(_("no changes on branch %s containing %s are "
1328 "reachable from %s\n")
1331 "reachable from %s\n")
1329 % (branch, branchrev, opts.get('rev')))
1332 % (branch, branchrev, opts.get('rev')))
1330 else:
1333 else:
1331 ui.warn(_("no changes on branch %s are reachable from %s\n")
1334 ui.warn(_("no changes on branch %s are reachable from %s\n")
1332 % (branch, opts.get('rev')))
1335 % (branch, opts.get('rev')))
1333 heads.extend(bheads)
1336 heads.extend(bheads)
1334 if not heads:
1337 if not heads:
1335 return 1
1338 return 1
1336 displayer = cmdutil.show_changeset(ui, repo, opts)
1339 displayer = cmdutil.show_changeset(ui, repo, opts)
1337 for n in heads:
1340 for n in heads:
1338 displayer.show(repo[n])
1341 displayer.show(repo[n])
1339
1342
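Per the docstring above, repository heads are simply changesets with no children. A tiny standalone illustration of that definition over a made-up parent map:

def findheads(parents):
    # parents maps each changeset to the list of its parent changesets
    nonheads = set()
    for ps in parents.values():
        nonheads.update(ps)
    return [node for node in parents if node not in nonheads]

# linear history 0 <- 1 <- 2 with an extra branch 1 <- 3: heads are 2 and 3
# sorted(findheads({0: [], 1: [0], 2: [1], 3: [1]})) -> [2, 3]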
1340 def help_(ui, name=None, with_version=False):
1343 def help_(ui, name=None, with_version=False):
1341 """show help for a given topic or a help overview
1344 """show help for a given topic or a help overview
1342
1345
1343 With no arguments, print a list of commands and short help.
1346 With no arguments, print a list of commands and short help.
1344
1347
1345 Given a topic, extension, or command name, print help for that
1348 Given a topic, extension, or command name, print help for that
1346 topic."""
1349 topic."""
1347 option_lists = []
1350 option_lists = []
1348
1351
1349 def addglobalopts(aliases):
1352 def addglobalopts(aliases):
1350 if ui.verbose:
1353 if ui.verbose:
1351 option_lists.append((_("global options:"), globalopts))
1354 option_lists.append((_("global options:"), globalopts))
1352 if name == 'shortlist':
1355 if name == 'shortlist':
1353 option_lists.append((_('use "hg help" for the full list '
1356 option_lists.append((_('use "hg help" for the full list '
1354 'of commands'), ()))
1357 'of commands'), ()))
1355 else:
1358 else:
1356 if name == 'shortlist':
1359 if name == 'shortlist':
1357 msg = _('use "hg help" for the full list of commands '
1360 msg = _('use "hg help" for the full list of commands '
1358 'or "hg -v" for details')
1361 'or "hg -v" for details')
1359 elif aliases:
1362 elif aliases:
1360 msg = _('use "hg -v help%s" to show aliases and '
1363 msg = _('use "hg -v help%s" to show aliases and '
1361 'global options') % (name and " " + name or "")
1364 'global options') % (name and " " + name or "")
1362 else:
1365 else:
1363 msg = _('use "hg -v help %s" to show global options') % name
1366 msg = _('use "hg -v help %s" to show global options') % name
1364 option_lists.append((msg, ()))
1367 option_lists.append((msg, ()))
1365
1368
1366 def helpcmd(name):
1369 def helpcmd(name):
1367 if with_version:
1370 if with_version:
1368 version_(ui)
1371 version_(ui)
1369 ui.write('\n')
1372 ui.write('\n')
1370
1373
1371 try:
1374 try:
1372 aliases, i = cmdutil.findcmd(name, table, False)
1375 aliases, i = cmdutil.findcmd(name, table, False)
1373 except error.AmbiguousCommand, inst:
1376 except error.AmbiguousCommand, inst:
1374 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1377 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1375 helplist(_('list of commands:\n\n'), select)
1378 helplist(_('list of commands:\n\n'), select)
1376 return
1379 return
1377
1380
1378 # synopsis
1381 # synopsis
1379 if len(i) > 2:
1382 if len(i) > 2:
1380 if i[2].startswith('hg'):
1383 if i[2].startswith('hg'):
1381 ui.write("%s\n" % i[2])
1384 ui.write("%s\n" % i[2])
1382 else:
1385 else:
1383 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1386 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1384 else:
1387 else:
1385 ui.write('hg %s\n' % aliases[0])
1388 ui.write('hg %s\n' % aliases[0])
1386
1389
1387 # aliases
1390 # aliases
1388 if not ui.quiet and len(aliases) > 1:
1391 if not ui.quiet and len(aliases) > 1:
1389 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1392 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1390
1393
1391 # description
1394 # description
1392 doc = gettext(i[0].__doc__)
1395 doc = gettext(i[0].__doc__)
1393 if not doc:
1396 if not doc:
1394 doc = _("(no help text available)")
1397 doc = _("(no help text available)")
1395 if ui.quiet:
1398 if ui.quiet:
1396 doc = doc.splitlines(0)[0]
1399 doc = doc.splitlines(0)[0]
1397 ui.write("\n%s\n" % doc.rstrip())
1400 ui.write("\n%s\n" % doc.rstrip())
1398
1401
1399 if not ui.quiet:
1402 if not ui.quiet:
1400 # options
1403 # options
1401 if i[1]:
1404 if i[1]:
1402 option_lists.append((_("options:\n"), i[1]))
1405 option_lists.append((_("options:\n"), i[1]))
1403
1406
1404 addglobalopts(False)
1407 addglobalopts(False)
1405
1408
1406 def helplist(header, select=None):
1409 def helplist(header, select=None):
1407 h = {}
1410 h = {}
1408 cmds = {}
1411 cmds = {}
1409 for c, e in table.iteritems():
1412 for c, e in table.iteritems():
1410 f = c.split("|", 1)[0]
1413 f = c.split("|", 1)[0]
1411 if select and not select(f):
1414 if select and not select(f):
1412 continue
1415 continue
1413 if (not select and name != 'shortlist' and
1416 if (not select and name != 'shortlist' and
1414 e[0].__module__ != __name__):
1417 e[0].__module__ != __name__):
1415 continue
1418 continue
1416 if name == "shortlist" and not f.startswith("^"):
1419 if name == "shortlist" and not f.startswith("^"):
1417 continue
1420 continue
1418 f = f.lstrip("^")
1421 f = f.lstrip("^")
1419 if not ui.debugflag and f.startswith("debug"):
1422 if not ui.debugflag and f.startswith("debug"):
1420 continue
1423 continue
1421 doc = gettext(e[0].__doc__)
1424 doc = gettext(e[0].__doc__)
1422 if not doc:
1425 if not doc:
1423 doc = _("(no help text available)")
1426 doc = _("(no help text available)")
1424 h[f] = doc.splitlines(0)[0].rstrip()
1427 h[f] = doc.splitlines(0)[0].rstrip()
1425 cmds[f] = c.lstrip("^")
1428 cmds[f] = c.lstrip("^")
1426
1429
1427 if not h:
1430 if not h:
1428 ui.status(_('no commands defined\n'))
1431 ui.status(_('no commands defined\n'))
1429 return
1432 return
1430
1433
1431 ui.status(header)
1434 ui.status(header)
1432 fns = sorted(h)
1435 fns = sorted(h)
1433 m = max(map(len, fns))
1436 m = max(map(len, fns))
1434 for f in fns:
1437 for f in fns:
1435 if ui.verbose:
1438 if ui.verbose:
1436 commands = cmds[f].replace("|",", ")
1439 commands = cmds[f].replace("|",", ")
1437 ui.write(" %s:\n %s\n"%(commands, h[f]))
1440 ui.write(" %s:\n %s\n"%(commands, h[f]))
1438 else:
1441 else:
1439 ui.write(' %-*s %s\n' % (m, f, h[f]))
1442 ui.write(' %-*s %s\n' % (m, f, h[f]))
1440
1443
1441 exts = list(extensions.extensions())
1444 exts = list(extensions.extensions())
1442 if exts and name != 'shortlist':
1445 if exts and name != 'shortlist':
1443 ui.write(_('\nenabled extensions:\n\n'))
1446 ui.write(_('\nenabled extensions:\n\n'))
1444 maxlength = 0
1447 maxlength = 0
1445 exthelps = []
1448 exthelps = []
1446 for ename, ext in exts:
1449 for ename, ext in exts:
1447 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1450 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1448 ename = ename.split('.')[-1]
1451 ename = ename.split('.')[-1]
1449 maxlength = max(len(ename), maxlength)
1452 maxlength = max(len(ename), maxlength)
1450 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1453 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1451 for ename, text in exthelps:
1454 for ename, text in exthelps:
1452 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1455 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1453
1456
1454 if not ui.quiet:
1457 if not ui.quiet:
1455 addglobalopts(True)
1458 addglobalopts(True)
1456
1459
1457 def helptopic(name):
1460 def helptopic(name):
1458 for names, header, doc in help.helptable:
1461 for names, header, doc in help.helptable:
1459 if name in names:
1462 if name in names:
1460 break
1463 break
1461 else:
1464 else:
1462 raise error.UnknownCommand(name)
1465 raise error.UnknownCommand(name)
1463
1466
1464 # description
1467 # description
1465 if not doc:
1468 if not doc:
1466 doc = _("(no help text available)")
1469 doc = _("(no help text available)")
1467 if hasattr(doc, '__call__'):
1470 if hasattr(doc, '__call__'):
1468 doc = doc()
1471 doc = doc()
1469
1472
1470 ui.write("%s\n" % header)
1473 ui.write("%s\n" % header)
1471 ui.write("%s\n" % doc.rstrip())
1474 ui.write("%s\n" % doc.rstrip())
1472
1475
1473 def helpext(name):
1476 def helpext(name):
1474 try:
1477 try:
1475 mod = extensions.find(name)
1478 mod = extensions.find(name)
1476 except KeyError:
1479 except KeyError:
1477 raise error.UnknownCommand(name)
1480 raise error.UnknownCommand(name)
1478
1481
1479 doc = gettext(mod.__doc__) or _('no help text available')
1482 doc = gettext(mod.__doc__) or _('no help text available')
1480 doc = doc.splitlines(0)
1483 doc = doc.splitlines(0)
1481 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1484 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1482 for d in doc[1:]:
1485 for d in doc[1:]:
1483 ui.write(d, '\n')
1486 ui.write(d, '\n')
1484
1487
1485 ui.status('\n')
1488 ui.status('\n')
1486
1489
1487 try:
1490 try:
1488 ct = mod.cmdtable
1491 ct = mod.cmdtable
1489 except AttributeError:
1492 except AttributeError:
1490 ct = {}
1493 ct = {}
1491
1494
1492 modcmds = set([c.split('|', 1)[0] for c in ct])
1495 modcmds = set([c.split('|', 1)[0] for c in ct])
1493 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1496 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1494
1497
1495 if name and name != 'shortlist':
1498 if name and name != 'shortlist':
1496 i = None
1499 i = None
1497 for f in (helptopic, helpcmd, helpext):
1500 for f in (helptopic, helpcmd, helpext):
1498 try:
1501 try:
1499 f(name)
1502 f(name)
1500 i = None
1503 i = None
1501 break
1504 break
1502 except error.UnknownCommand, inst:
1505 except error.UnknownCommand, inst:
1503 i = inst
1506 i = inst
1504 if i:
1507 if i:
1505 raise i
1508 raise i
1506
1509
1507 else:
1510 else:
1508 # program name
1511 # program name
1509 if ui.verbose or with_version:
1512 if ui.verbose or with_version:
1510 version_(ui)
1513 version_(ui)
1511 else:
1514 else:
1512 ui.status(_("Mercurial Distributed SCM\n"))
1515 ui.status(_("Mercurial Distributed SCM\n"))
1513 ui.status('\n')
1516 ui.status('\n')
1514
1517
1515 # list of commands
1518 # list of commands
1516 if name == "shortlist":
1519 if name == "shortlist":
1517 header = _('basic commands:\n\n')
1520 header = _('basic commands:\n\n')
1518 else:
1521 else:
1519 header = _('list of commands:\n\n')
1522 header = _('list of commands:\n\n')
1520
1523
1521 helplist(header)
1524 helplist(header)
1522
1525
1523 # list all option lists
1526 # list all option lists
1524 opt_output = []
1527 opt_output = []
1525 for title, options in option_lists:
1528 for title, options in option_lists:
1526 opt_output.append(("\n%s" % title, None))
1529 opt_output.append(("\n%s" % title, None))
1527 for shortopt, longopt, default, desc in options:
1530 for shortopt, longopt, default, desc in options:
1528 if "DEPRECATED" in desc and not ui.verbose: continue
1531 if "DEPRECATED" in desc and not ui.verbose: continue
1529 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1532 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1530 longopt and " --%s" % longopt),
1533 longopt and " --%s" % longopt),
1531 "%s%s" % (desc,
1534 "%s%s" % (desc,
1532 default
1535 default
1533 and _(" (default: %s)") % default
1536 and _(" (default: %s)") % default
1534 or "")))
1537 or "")))
1535
1538
1536 if not name:
1539 if not name:
1537 ui.write(_("\nadditional help topics:\n\n"))
1540 ui.write(_("\nadditional help topics:\n\n"))
1538 topics = []
1541 topics = []
1539 for names, header, doc in help.helptable:
1542 for names, header, doc in help.helptable:
1540 names = [(-len(name), name) for name in names]
1543 names = [(-len(name), name) for name in names]
1541 names.sort()
1544 names.sort()
1542 topics.append((names[0][1], header))
1545 topics.append((names[0][1], header))
1543 topics_len = max([len(s[0]) for s in topics])
1546 topics_len = max([len(s[0]) for s in topics])
1544 for t, desc in topics:
1547 for t, desc in topics:
1545 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1548 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1546
1549
1547 if opt_output:
1550 if opt_output:
1548 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1551 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1549 for first, second in opt_output:
1552 for first, second in opt_output:
1550 if second:
1553 if second:
1551 # wrap descriptions at 70 characters, just like the
1554 # wrap descriptions at 70 characters, just like the
1552 # main help texts
1555 # main help texts
1553 second = textwrap.wrap(second, width=70 - opts_len - 3)
1556 second = textwrap.wrap(second, width=70 - opts_len - 3)
1554 pad = '\n' + ' ' * (opts_len + 3)
1557 pad = '\n' + ' ' * (opts_len + 3)
1555 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1558 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1556 else:
1559 else:
1557 ui.write("%s\n" % first)
1560 ui.write("%s\n" % first)
1558
1561
1559 def identify(ui, repo, source=None,
1562 def identify(ui, repo, source=None,
1560 rev=None, num=None, id=None, branch=None, tags=None):
1563 rev=None, num=None, id=None, branch=None, tags=None):
1561 """identify the working copy or specified revision
1564 """identify the working copy or specified revision
1562
1565
1563 With no revision, print a summary of the current state of the
1566 With no revision, print a summary of the current state of the
1564 repository.
1567 repository.
1565
1568
1566 With a path, do a lookup in another repository.
1569 With a path, do a lookup in another repository.
1567
1570
1568 This summary identifies the repository state using one or two
1571 This summary identifies the repository state using one or two
1569 parent hash identifiers, followed by a "+" if there are
1572 parent hash identifiers, followed by a "+" if there are
1570 uncommitted changes in the working directory, a list of tags for
1573 uncommitted changes in the working directory, a list of tags for
1571 this revision and a branch name for non-default branches.
1574 this revision and a branch name for non-default branches.
1572 """
1575 """
1573
1576
1574 if not repo and not source:
1577 if not repo and not source:
1575 raise util.Abort(_("There is no Mercurial repository here "
1578 raise util.Abort(_("There is no Mercurial repository here "
1576 "(.hg not found)"))
1579 "(.hg not found)"))
1577
1580
1578 hexfunc = ui.debugflag and hex or short
1581 hexfunc = ui.debugflag and hex or short
1579 default = not (num or id or branch or tags)
1582 default = not (num or id or branch or tags)
1580 output = []
1583 output = []
1581
1584
1582 revs = []
1585 revs = []
1583 if source:
1586 if source:
1584 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1587 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1585 repo = hg.repository(ui, source)
1588 repo = hg.repository(ui, source)
1586
1589
1587 if not repo.local():
1590 if not repo.local():
1588 if not rev and revs:
1591 if not rev and revs:
1589 rev = revs[0]
1592 rev = revs[0]
1590 if not rev:
1593 if not rev:
1591 rev = "tip"
1594 rev = "tip"
1592 if num or branch or tags:
1595 if num or branch or tags:
1593 raise util.Abort(
1596 raise util.Abort(
1594 "can't query remote revision number, branch, or tags")
1597 "can't query remote revision number, branch, or tags")
1595 output = [hexfunc(repo.lookup(rev))]
1598 output = [hexfunc(repo.lookup(rev))]
1596 elif not rev:
1599 elif not rev:
1597 ctx = repo[None]
1600 ctx = repo[None]
1598 parents = ctx.parents()
1601 parents = ctx.parents()
1599 changed = False
1602 changed = False
1600 if default or id or num:
1603 if default or id or num:
1601 changed = ctx.files() + ctx.deleted()
1604 changed = ctx.files() + ctx.deleted()
1602 if default or id:
1605 if default or id:
1603 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1606 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1604 (changed) and "+" or "")]
1607 (changed) and "+" or "")]
1605 if num:
1608 if num:
1606 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1609 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1607 (changed) and "+" or ""))
1610 (changed) and "+" or ""))
1608 else:
1611 else:
1609 ctx = repo[rev]
1612 ctx = repo[rev]
1610 if default or id:
1613 if default or id:
1611 output = [hexfunc(ctx.node())]
1614 output = [hexfunc(ctx.node())]
1612 if num:
1615 if num:
1613 output.append(str(ctx.rev()))
1616 output.append(str(ctx.rev()))
1614
1617
1615 if repo.local() and default and not ui.quiet:
1618 if repo.local() and default and not ui.quiet:
1616 b = encoding.tolocal(ctx.branch())
1619 b = encoding.tolocal(ctx.branch())
1617 if b != 'default':
1620 if b != 'default':
1618 output.append("(%s)" % b)
1621 output.append("(%s)" % b)
1619
1622
1620 # multiple tags for a single parent separated by '/'
1623 # multiple tags for a single parent separated by '/'
1621 t = "/".join(ctx.tags())
1624 t = "/".join(ctx.tags())
1622 if t:
1625 if t:
1623 output.append(t)
1626 output.append(t)
1624
1627
1625 if branch:
1628 if branch:
1626 output.append(encoding.tolocal(ctx.branch()))
1629 output.append(encoding.tolocal(ctx.branch()))
1627
1630
1628 if tags:
1631 if tags:
1629 output.extend(ctx.tags())
1632 output.extend(ctx.tags())
1630
1633
1631 ui.write("%s\n" % ' '.join(output))
1634 ui.write("%s\n" % ' '.join(output))
1632
1635
1633 def import_(ui, repo, patch1, *patches, **opts):
1636 def import_(ui, repo, patch1, *patches, **opts):
1634 """import an ordered set of patches
1637 """import an ordered set of patches
1635
1638
1636 Import a list of patches and commit them individually.
1639 Import a list of patches and commit them individually.
1637
1640
1638 If there are outstanding changes in the working directory, import
1641 If there are outstanding changes in the working directory, import
1639 will abort unless given the -f/--force flag.
1642 will abort unless given the -f/--force flag.
1640
1643
1641 You can import a patch straight from a mail message. Even patches
1644 You can import a patch straight from a mail message. Even patches
1642 as attachments work (body part must be type text/plain or
1645 as attachments work (body part must be type text/plain or
1643 text/x-patch to be used). The From and Subject headers of the email
1646 text/x-patch to be used). The From and Subject headers of the email
1644 message are used as the default committer and commit message. All
1647 message are used as the default committer and commit message. All
1645 text/plain body parts before the first diff are added to the commit
1648 text/plain body parts before the first diff are added to the commit
1646 message.
1649 message.
1647
1650
1648 If the imported patch was generated by hg export, user and
1651 If the imported patch was generated by hg export, user and
1649 description from patch override values from message headers and
1652 description from patch override values from message headers and
1650 body. Values given on command line with -m/--message and -u/--user
1653 body. Values given on command line with -m/--message and -u/--user
1651 override these.
1654 override these.
1652
1655
1653 If --exact is specified, import will set the working directory to
1656 If --exact is specified, import will set the working directory to
1654 the parent of each patch before applying it, and will abort if the
1657 the parent of each patch before applying it, and will abort if the
1655 resulting changeset has a different ID than the one recorded in
1658 resulting changeset has a different ID than the one recorded in
1656 the patch. This may happen due to character set problems or other
1659 the patch. This may happen due to character set problems or other
1657 deficiencies in the text patch format.
1660 deficiencies in the text patch format.
1658
1661
1659 With -s/--similarity, hg will attempt to discover renames and
1662 With -s/--similarity, hg will attempt to discover renames and
1660 copies in the patch in the same way as 'addremove'.
1663 copies in the patch in the same way as 'addremove'.
1661
1664
1662 To read a patch from standard input, use patch name "-". See 'hg
1665 To read a patch from standard input, use patch name "-". See 'hg
1663 help dates' for a list of formats valid for -d/--date.
1666 help dates' for a list of formats valid for -d/--date.
1664 """
1667 """
1665 patches = (patch1,) + patches
1668 patches = (patch1,) + patches
1666
1669
1667 date = opts.get('date')
1670 date = opts.get('date')
1668 if date:
1671 if date:
1669 opts['date'] = util.parsedate(date)
1672 opts['date'] = util.parsedate(date)
1670
1673
1671 try:
1674 try:
1672 sim = float(opts.get('similarity') or 0)
1675 sim = float(opts.get('similarity') or 0)
1673 except ValueError:
1676 except ValueError:
1674 raise util.Abort(_('similarity must be a number'))
1677 raise util.Abort(_('similarity must be a number'))
1675 if sim < 0 or sim > 100:
1678 if sim < 0 or sim > 100:
1676 raise util.Abort(_('similarity must be between 0 and 100'))
1679 raise util.Abort(_('similarity must be between 0 and 100'))
1677
1680
1678 if opts.get('exact') or not opts.get('force'):
1681 if opts.get('exact') or not opts.get('force'):
1679 cmdutil.bail_if_changed(repo)
1682 cmdutil.bail_if_changed(repo)
1680
1683
1681 d = opts["base"]
1684 d = opts["base"]
1682 strip = opts["strip"]
1685 strip = opts["strip"]
1683 wlock = lock = None
1686 wlock = lock = None
1684 try:
1687 try:
1685 wlock = repo.wlock()
1688 wlock = repo.wlock()
1686 lock = repo.lock()
1689 lock = repo.lock()
1687 for p in patches:
1690 for p in patches:
1688 pf = os.path.join(d, p)
1691 pf = os.path.join(d, p)
1689
1692
1690 if pf == '-':
1693 if pf == '-':
1691 ui.status(_("applying patch from stdin\n"))
1694 ui.status(_("applying patch from stdin\n"))
1692 pf = sys.stdin
1695 pf = sys.stdin
1693 else:
1696 else:
1694 ui.status(_("applying %s\n") % p)
1697 ui.status(_("applying %s\n") % p)
1695 pf = url.open(ui, pf)
1698 pf = url.open(ui, pf)
1696 data = patch.extract(ui, pf)
1699 data = patch.extract(ui, pf)
1697 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1700 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1698
1701
1699 if tmpname is None:
1702 if tmpname is None:
1700 raise util.Abort(_('no diffs found'))
1703 raise util.Abort(_('no diffs found'))
1701
1704
1702 try:
1705 try:
1703 cmdline_message = cmdutil.logmessage(opts)
1706 cmdline_message = cmdutil.logmessage(opts)
1704 if cmdline_message:
1707 if cmdline_message:
1705 # pickup the cmdline msg
1708 # pickup the cmdline msg
1706 message = cmdline_message
1709 message = cmdline_message
1707 elif message:
1710 elif message:
1708 # pickup the patch msg
1711 # pickup the patch msg
1709 message = message.strip()
1712 message = message.strip()
1710 else:
1713 else:
1711 # launch the editor
1714 # launch the editor
1712 message = None
1715 message = None
1713 ui.debug(_('message:\n%s\n') % message)
1716 ui.debug(_('message:\n%s\n') % message)
1714
1717
1715 wp = repo.parents()
1718 wp = repo.parents()
1716 if opts.get('exact'):
1719 if opts.get('exact'):
1717 if not nodeid or not p1:
1720 if not nodeid or not p1:
1718 raise util.Abort(_('not a mercurial patch'))
1721 raise util.Abort(_('not a mercurial patch'))
1719 p1 = repo.lookup(p1)
1722 p1 = repo.lookup(p1)
1720 p2 = repo.lookup(p2 or hex(nullid))
1723 p2 = repo.lookup(p2 or hex(nullid))
1721
1724
1722 if p1 != wp[0].node():
1725 if p1 != wp[0].node():
1723 hg.clean(repo, p1)
1726 hg.clean(repo, p1)
1724 repo.dirstate.setparents(p1, p2)
1727 repo.dirstate.setparents(p1, p2)
1725 elif p2:
1728 elif p2:
1726 try:
1729 try:
1727 p1 = repo.lookup(p1)
1730 p1 = repo.lookup(p1)
1728 p2 = repo.lookup(p2)
1731 p2 = repo.lookup(p2)
1729 if p1 == wp[0].node():
1732 if p1 == wp[0].node():
1730 repo.dirstate.setparents(p1, p2)
1733 repo.dirstate.setparents(p1, p2)
1731 except error.RepoError:
1734 except error.RepoError:
1732 pass
1735 pass
1733 if opts.get('exact') or opts.get('import_branch'):
1736 if opts.get('exact') or opts.get('import_branch'):
1734 repo.dirstate.setbranch(branch or 'default')
1737 repo.dirstate.setbranch(branch or 'default')
1735
1738
1736 files = {}
1739 files = {}
1737 try:
1740 try:
1738 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1741 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1739 files=files)
1742 files=files)
1740 finally:
1743 finally:
1741 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1744 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1742 if not opts.get('no_commit'):
1745 if not opts.get('no_commit'):
1743 n = repo.commit(files, message, opts.get('user') or user,
1746 n = repo.commit(files, message, opts.get('user') or user,
1744 opts.get('date') or date)
1747 opts.get('date') or date,
1748 editor=cmdutil.commiteditor)
1745 if opts.get('exact'):
1749 if opts.get('exact'):
1746 if hex(n) != nodeid:
1750 if hex(n) != nodeid:
1747 repo.rollback()
1751 repo.rollback()
1748 raise util.Abort(_('patch is damaged'
1752 raise util.Abort(_('patch is damaged'
1749 ' or loses information'))
1753 ' or loses information'))
1750 # Force a dirstate write so that the next transaction
1754 # Force a dirstate write so that the next transaction
1751 # backs up an up-to-date file.
1755 # backs up an up-to-date file.
1752 repo.dirstate.write()
1756 repo.dirstate.write()
1753 finally:
1757 finally:
1754 os.unlink(tmpname)
1758 os.unlink(tmpname)
1755 finally:
1759 finally:
1756 release(lock, wlock)
1760 release(lock, wlock)
1757
1761
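Editorial note: the changed hunk above is the point of this changeset. import_ no longer decides how to launch an editor itself; it leaves message as None and passes cmdutil.commiteditor to repo.commit as an editor callback. A tiny, hypothetical sketch of that callback shape (fake_commit/fake_editor are illustrations, not the Mercurial API):

def fake_editor(draft):
    # stand-in for cmdutil.commiteditor: supply a message when none was given
    return draft or "message typed into the editor"

def fake_commit(message, editor=None):
    # mirrors the idea of repo.commit(..., editor=...): the commit code, not
    # the calling command, decides when the editor callback is needed
    if not message and editor:
        message = editor(message)
    return message

print(fake_commit("from the patch", fake_editor))  # patch message wins
print(fake_commit(None, fake_editor))              # editor fills the gap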
1758 def incoming(ui, repo, source="default", **opts):
1762 def incoming(ui, repo, source="default", **opts):
1759 """show new changesets found in source
1763 """show new changesets found in source
1760
1764
1761 Show new changesets found in the specified path/URL or the default
1765 Show new changesets found in the specified path/URL or the default
1762 pull location. These are the changesets that would be pulled if a
1766 pull location. These are the changesets that would be pulled if a
1763 pull was requested.
1767 pull was requested.
1764
1768
1765 For a remote repository, using --bundle avoids downloading the
1769 For a remote repository, using --bundle avoids downloading the
1766 changesets twice if the incoming is followed by a pull.
1770 changesets twice if the incoming is followed by a pull.
1767
1771
1768 See pull for valid source format details.
1772 See pull for valid source format details.
1769 """
1773 """
1770 limit = cmdutil.loglimit(opts)
1774 limit = cmdutil.loglimit(opts)
1771 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1775 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1772 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1776 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1773 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1777 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1774 if revs:
1778 if revs:
1775 revs = [other.lookup(rev) for rev in revs]
1779 revs = [other.lookup(rev) for rev in revs]
1776 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1780 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1777 force=opts["force"])
1781 force=opts["force"])
1778 if not incoming:
1782 if not incoming:
1779 try:
1783 try:
1780 os.unlink(opts["bundle"])
1784 os.unlink(opts["bundle"])
1781 except:
1785 except:
1782 pass
1786 pass
1783 ui.status(_("no changes found\n"))
1787 ui.status(_("no changes found\n"))
1784 return 1
1788 return 1
1785
1789
1786 cleanup = None
1790 cleanup = None
1787 try:
1791 try:
1788 fname = opts["bundle"]
1792 fname = opts["bundle"]
1789 if fname or not other.local():
1793 if fname or not other.local():
1790 # create a bundle (uncompressed if other repo is not local)
1794 # create a bundle (uncompressed if other repo is not local)
1791
1795
1792 if revs is None and other.capable('changegroupsubset'):
1796 if revs is None and other.capable('changegroupsubset'):
1793 revs = rheads
1797 revs = rheads
1794
1798
1795 if revs is None:
1799 if revs is None:
1796 cg = other.changegroup(incoming, "incoming")
1800 cg = other.changegroup(incoming, "incoming")
1797 else:
1801 else:
1798 cg = other.changegroupsubset(incoming, revs, 'incoming')
1802 cg = other.changegroupsubset(incoming, revs, 'incoming')
1799 bundletype = other.local() and "HG10BZ" or "HG10UN"
1803 bundletype = other.local() and "HG10BZ" or "HG10UN"
1800 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1804 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1801 # keep written bundle?
1805 # keep written bundle?
1802 if opts["bundle"]:
1806 if opts["bundle"]:
1803 cleanup = None
1807 cleanup = None
1804 if not other.local():
1808 if not other.local():
1805 # use the created uncompressed bundlerepo
1809 # use the created uncompressed bundlerepo
1806 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1810 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1807
1811
1808 o = other.changelog.nodesbetween(incoming, revs)[0]
1812 o = other.changelog.nodesbetween(incoming, revs)[0]
1809 if opts.get('newest_first'):
1813 if opts.get('newest_first'):
1810 o.reverse()
1814 o.reverse()
1811 displayer = cmdutil.show_changeset(ui, other, opts)
1815 displayer = cmdutil.show_changeset(ui, other, opts)
1812 count = 0
1816 count = 0
1813 for n in o:
1817 for n in o:
1814 if count >= limit:
1818 if count >= limit:
1815 break
1819 break
1816 parents = [p for p in other.changelog.parents(n) if p != nullid]
1820 parents = [p for p in other.changelog.parents(n) if p != nullid]
1817 if opts.get('no_merges') and len(parents) == 2:
1821 if opts.get('no_merges') and len(parents) == 2:
1818 continue
1822 continue
1819 count += 1
1823 count += 1
1820 displayer.show(other[n])
1824 displayer.show(other[n])
1821 finally:
1825 finally:
1822 if hasattr(other, 'close'):
1826 if hasattr(other, 'close'):
1823 other.close()
1827 other.close()
1824 if cleanup:
1828 if cleanup:
1825 os.unlink(cleanup)
1829 os.unlink(cleanup)
1826
1830
1827 def init(ui, dest=".", **opts):
1831 def init(ui, dest=".", **opts):
1828 """create a new repository in the given directory
1832 """create a new repository in the given directory
1829
1833
1830 Initialize a new repository in the given directory. If the given
1834 Initialize a new repository in the given directory. If the given
1831 directory does not exist, it is created.
1835 directory does not exist, it is created.
1832
1836
1833 If no directory is given, the current directory is used.
1837 If no directory is given, the current directory is used.
1834
1838
1835 It is possible to specify an ssh:// URL as the destination.
1839 It is possible to specify an ssh:// URL as the destination.
1836 See 'hg help urls' for more information.
1840 See 'hg help urls' for more information.
1837 """
1841 """
1838 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1842 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1839
1843
1840 def locate(ui, repo, *pats, **opts):
1844 def locate(ui, repo, *pats, **opts):
1841 """locate files matching specific patterns
1845 """locate files matching specific patterns
1842
1846
1843 Print all files under Mercurial control whose names match the
1847 Print all files under Mercurial control whose names match the
1844 given patterns.
1848 given patterns.
1845
1849
1846 This command searches the entire repository by default. To search
1850 This command searches the entire repository by default. To search
1847 just the current directory and its subdirectories, use
1851 just the current directory and its subdirectories, use
1848 "--include .".
1852 "--include .".
1849
1853
1850 If no patterns are given to match, this command prints all file
1854 If no patterns are given to match, this command prints all file
1851 names.
1855 names.
1852
1856
1853 If you want to feed the output of this command into the "xargs"
1857 If you want to feed the output of this command into the "xargs"
1854 command, use the -0 option to both this command and "xargs". This
1858 command, use the -0 option to both this command and "xargs". This
1855 will avoid the problem of "xargs" treating single filenames that
1859 will avoid the problem of "xargs" treating single filenames that
1856 contain white space as multiple filenames.
1860 contain white space as multiple filenames.
1857 """
1861 """
1858 end = opts.get('print0') and '\0' or '\n'
1862 end = opts.get('print0') and '\0' or '\n'
1859 rev = opts.get('rev') or None
1863 rev = opts.get('rev') or None
1860
1864
1861 ret = 1
1865 ret = 1
1862 m = cmdutil.match(repo, pats, opts, default='relglob')
1866 m = cmdutil.match(repo, pats, opts, default='relglob')
1863 m.bad = lambda x,y: False
1867 m.bad = lambda x,y: False
1864 for abs in repo[rev].walk(m):
1868 for abs in repo[rev].walk(m):
1865 if not rev and abs not in repo.dirstate:
1869 if not rev and abs not in repo.dirstate:
1866 continue
1870 continue
1867 if opts.get('fullpath'):
1871 if opts.get('fullpath'):
1868 ui.write(repo.wjoin(abs), end)
1872 ui.write(repo.wjoin(abs), end)
1869 else:
1873 else:
1870 ui.write(((pats and m.rel(abs)) or abs), end)
1874 ui.write(((pats and m.rel(abs)) or abs), end)
1871 ret = 0
1875 ret = 0
1872
1876
1873 return ret
1877 return ret
1874
1878
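Editorial note: the locate docstring recommends pairing -0 with "xargs -0"; in the code that is just the choice of record terminator (end = '\0' or '\n'). A short sketch of the same idea (write_names is hypothetical):

import sys

def write_names(names, print0=False):
    # NUL-terminate records for "xargs -0" so names containing spaces or
    # newlines are not split into several arguments downstream
    end = "\0" if print0 else "\n"
    for name in names:
        sys.stdout.write(name + end)

write_names(["plain.txt", "name with spaces.txt"], print0=True)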
1875 def log(ui, repo, *pats, **opts):
1879 def log(ui, repo, *pats, **opts):
1876 """show revision history of entire repository or files
1880 """show revision history of entire repository or files
1877
1881
1878 Print the revision history of the specified files or the entire
1882 Print the revision history of the specified files or the entire
1879 project.
1883 project.
1880
1884
1881 File history is shown without following rename or copy history of
1885 File history is shown without following rename or copy history of
1882 files. Use -f/--follow with a file name to follow history across
1886 files. Use -f/--follow with a file name to follow history across
1883 renames and copies. --follow without a file name will only show
1887 renames and copies. --follow without a file name will only show
1884 ancestors or descendants of the starting revision. --follow-first
1888 ancestors or descendants of the starting revision. --follow-first
1885 only follows the first parent of merge revisions.
1889 only follows the first parent of merge revisions.
1886
1890
1887 If no revision range is specified, the default is tip:0 unless
1891 If no revision range is specified, the default is tip:0 unless
1888 --follow is set, in which case the working directory parent is
1892 --follow is set, in which case the working directory parent is
1889 used as the starting revision.
1893 used as the starting revision.
1890
1894
1891 See 'hg help dates' for a list of formats valid for -d/--date.
1895 See 'hg help dates' for a list of formats valid for -d/--date.
1892
1896
1893 By default this command outputs: changeset id and hash, tags,
1897 By default this command outputs: changeset id and hash, tags,
1894 non-trivial parents, user, date and time, and a summary for each
1898 non-trivial parents, user, date and time, and a summary for each
1895 commit. When the -v/--verbose switch is used, the list of changed
1899 commit. When the -v/--verbose switch is used, the list of changed
1896 files and the full commit message are shown.
1900 files and the full commit message are shown.
1897
1901
1898 NOTE: log -p/--patch may generate unexpected diff output for merge
1902 NOTE: log -p/--patch may generate unexpected diff output for merge
1899 changesets, as it will only compare the merge changeset against
1903 changesets, as it will only compare the merge changeset against
1900 its first parent. Also, the files: list will only reflect files
1904 its first parent. Also, the files: list will only reflect files
1901 that are different from BOTH parents.
1905 that are different from BOTH parents.
1902
1906
1903 """
1907 """
1904
1908
1905 get = util.cachefunc(lambda r: repo[r].changeset())
1909 get = util.cachefunc(lambda r: repo[r].changeset())
1906 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1910 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1907
1911
1908 limit = cmdutil.loglimit(opts)
1912 limit = cmdutil.loglimit(opts)
1909 count = 0
1913 count = 0
1910
1914
1911 if opts.get('copies') and opts.get('rev'):
1915 if opts.get('copies') and opts.get('rev'):
1912 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1916 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1913 else:
1917 else:
1914 endrev = len(repo)
1918 endrev = len(repo)
1915 rcache = {}
1919 rcache = {}
1916 ncache = {}
1920 ncache = {}
1917 def getrenamed(fn, rev):
1921 def getrenamed(fn, rev):
1918 '''looks up all renames for a file (up to endrev) the first
1922 '''looks up all renames for a file (up to endrev) the first
1919 time the file is given. It indexes on the changerev and only
1923 time the file is given. It indexes on the changerev and only
1920 parses the manifest if linkrev != changerev.
1924 parses the manifest if linkrev != changerev.
1921 Returns rename info for fn at changerev rev.'''
1925 Returns rename info for fn at changerev rev.'''
1922 if fn not in rcache:
1926 if fn not in rcache:
1923 rcache[fn] = {}
1927 rcache[fn] = {}
1924 ncache[fn] = {}
1928 ncache[fn] = {}
1925 fl = repo.file(fn)
1929 fl = repo.file(fn)
1926 for i in fl:
1930 for i in fl:
1927 node = fl.node(i)
1931 node = fl.node(i)
1928 lr = fl.linkrev(i)
1932 lr = fl.linkrev(i)
1929 renamed = fl.renamed(node)
1933 renamed = fl.renamed(node)
1930 rcache[fn][lr] = renamed
1934 rcache[fn][lr] = renamed
1931 if renamed:
1935 if renamed:
1932 ncache[fn][node] = renamed
1936 ncache[fn][node] = renamed
1933 if lr >= endrev:
1937 if lr >= endrev:
1934 break
1938 break
1935 if rev in rcache[fn]:
1939 if rev in rcache[fn]:
1936 return rcache[fn][rev]
1940 return rcache[fn][rev]
1937
1941
1938 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1942 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1939 # filectx logic.
1943 # filectx logic.
1940
1944
1941 try:
1945 try:
1942 return repo[rev][fn].renamed()
1946 return repo[rev][fn].renamed()
1943 except error.LookupError:
1947 except error.LookupError:
1944 pass
1948 pass
1945 return None
1949 return None
1946
1950
1947 df = False
1951 df = False
1948 if opts["date"]:
1952 if opts["date"]:
1949 df = util.matchdate(opts["date"])
1953 df = util.matchdate(opts["date"])
1950
1954
1951 only_branches = opts.get('only_branch')
1955 only_branches = opts.get('only_branch')
1952
1956
1953 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1957 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1954 for st, rev, fns in changeiter:
1958 for st, rev, fns in changeiter:
1955 if st == 'add':
1959 if st == 'add':
1956 parents = [p for p in repo.changelog.parentrevs(rev)
1960 parents = [p for p in repo.changelog.parentrevs(rev)
1957 if p != nullrev]
1961 if p != nullrev]
1958 if opts.get('no_merges') and len(parents) == 2:
1962 if opts.get('no_merges') and len(parents) == 2:
1959 continue
1963 continue
1960 if opts.get('only_merges') and len(parents) != 2:
1964 if opts.get('only_merges') and len(parents) != 2:
1961 continue
1965 continue
1962
1966
1963 if only_branches:
1967 if only_branches:
1964 revbranch = get(rev)[5]['branch']
1968 revbranch = get(rev)[5]['branch']
1965 if revbranch not in only_branches:
1969 if revbranch not in only_branches:
1966 continue
1970 continue
1967
1971
1968 if df:
1972 if df:
1969 changes = get(rev)
1973 changes = get(rev)
1970 if not df(changes[2][0]):
1974 if not df(changes[2][0]):
1971 continue
1975 continue
1972
1976
1973 if opts.get('keyword'):
1977 if opts.get('keyword'):
1974 changes = get(rev)
1978 changes = get(rev)
1975 miss = 0
1979 miss = 0
1976 for k in [kw.lower() for kw in opts['keyword']]:
1980 for k in [kw.lower() for kw in opts['keyword']]:
1977 if not (k in changes[1].lower() or
1981 if not (k in changes[1].lower() or
1978 k in changes[4].lower() or
1982 k in changes[4].lower() or
1979 k in " ".join(changes[3]).lower()):
1983 k in " ".join(changes[3]).lower()):
1980 miss = 1
1984 miss = 1
1981 break
1985 break
1982 if miss:
1986 if miss:
1983 continue
1987 continue
1984
1988
1985 if opts['user']:
1989 if opts['user']:
1986 changes = get(rev)
1990 changes = get(rev)
1987 if not [k for k in opts['user'] if k in changes[1]]:
1991 if not [k for k in opts['user'] if k in changes[1]]:
1988 continue
1992 continue
1989
1993
1990 copies = []
1994 copies = []
1991 if opts.get('copies') and rev:
1995 if opts.get('copies') and rev:
1992 for fn in get(rev)[3]:
1996 for fn in get(rev)[3]:
1993 rename = getrenamed(fn, rev)
1997 rename = getrenamed(fn, rev)
1994 if rename:
1998 if rename:
1995 copies.append((fn, rename[0]))
1999 copies.append((fn, rename[0]))
1996 displayer.show(context.changectx(repo, rev), copies=copies)
2000 displayer.show(context.changectx(repo, rev), copies=copies)
1997 elif st == 'iter':
2001 elif st == 'iter':
1998 if count == limit: break
2002 if count == limit: break
1999 if displayer.flush(rev):
2003 if displayer.flush(rev):
2000 count += 1
2004 count += 1
2001
2005
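Editorial note: getrenamed() above scans a filelog once per file, caching rename information keyed by linkrev, and answers later queries from that cache (falling back to filectx when the revision is not cached). A generic sketch of that lazy per-key cache (make_lookup and the toy scan function are hypothetical):

def make_lookup(scan):
    # scan(key) yields (subkey, value) pairs; run it at most once per key
    # and serve subsequent (key, subkey) queries from the cache
    cache = {}
    def lookup(key, subkey):
        if key not in cache:
            cache[key] = dict(scan(key))
        return cache[key].get(subkey)
    return lookup

renamed = make_lookup(lambda fn: [(3, (fn + ".old", "somenode"))])
print(renamed("a.txt", 3))   # ('a.txt.old', 'somenode')
print(renamed("a.txt", 5))   # None -- the caller would fall back to filectx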
2002 def manifest(ui, repo, node=None, rev=None):
2006 def manifest(ui, repo, node=None, rev=None):
2003 """output the current or given revision of the project manifest
2007 """output the current or given revision of the project manifest
2004
2008
2005 Print a list of version controlled files for the given revision.
2009 Print a list of version controlled files for the given revision.
2006 If no revision is given, the first parent of the working directory
2010 If no revision is given, the first parent of the working directory
2007 is used, or the null revision if none is checked out.
2011 is used, or the null revision if none is checked out.
2008
2012
2009 With -v flag, print file permissions, symlink and executable bits.
2013 With -v flag, print file permissions, symlink and executable bits.
2010 With --debug flag, print file revision hashes.
2014 With --debug flag, print file revision hashes.
2011 """
2015 """
2012
2016
2013 if rev and node:
2017 if rev and node:
2014 raise util.Abort(_("please specify just one revision"))
2018 raise util.Abort(_("please specify just one revision"))
2015
2019
2016 if not node:
2020 if not node:
2017 node = rev
2021 node = rev
2018
2022
2019 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2023 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2020 ctx = repo[node]
2024 ctx = repo[node]
2021 for f in ctx:
2025 for f in ctx:
2022 if ui.debugflag:
2026 if ui.debugflag:
2023 ui.write("%40s " % hex(ctx.manifest()[f]))
2027 ui.write("%40s " % hex(ctx.manifest()[f]))
2024 if ui.verbose:
2028 if ui.verbose:
2025 ui.write(decor[ctx.flags(f)])
2029 ui.write(decor[ctx.flags(f)])
2026 ui.write("%s\n" % f)
2030 ui.write("%s\n" % f)
2027
2031
2028 def merge(ui, repo, node=None, **opts):
2032 def merge(ui, repo, node=None, **opts):
2029 """merge working directory with another revision
2033 """merge working directory with another revision
2030
2034
2031 The contents of the current working directory are updated with all
2035 The contents of the current working directory are updated with all
2032 changes made in the requested revision since the last common
2036 changes made in the requested revision since the last common
2033 predecessor revision.
2037 predecessor revision.
2034
2038
2035 Files that changed between either parent are marked as changed for
2039 Files that changed between either parent are marked as changed for
2036 the next commit and a commit must be performed before any further
2040 the next commit and a commit must be performed before any further
2037 updates are allowed. The next commit has two parents.
2041 updates are allowed. The next commit has two parents.
2038
2042
2039 If no revision is specified, the working directory's parent is a
2043 If no revision is specified, the working directory's parent is a
2040 head revision, and the current branch contains exactly one other
2044 head revision, and the current branch contains exactly one other
2041 head, the other head is merged by default. Otherwise, an
2045 head, the other head is merged by default. Otherwise, an
2042 explicit revision to merge with must be provided.
2046 explicit revision to merge with must be provided.
2043 """
2047 """
2044
2048
2045 if opts.get('rev') and node:
2049 if opts.get('rev') and node:
2046 raise util.Abort(_("please specify just one revision"))
2050 raise util.Abort(_("please specify just one revision"))
2047 if not node:
2051 if not node:
2048 node = opts.get('rev')
2052 node = opts.get('rev')
2049
2053
2050 if not node:
2054 if not node:
2051 branch = repo.changectx(None).branch()
2055 branch = repo.changectx(None).branch()
2052 bheads = repo.branchheads(branch)
2056 bheads = repo.branchheads(branch)
2053 if len(bheads) > 2:
2057 if len(bheads) > 2:
2054 raise util.Abort(_("branch '%s' has %d heads - "
2058 raise util.Abort(_("branch '%s' has %d heads - "
2055 "please merge with an explicit rev") %
2059 "please merge with an explicit rev") %
2056 (branch, len(bheads)))
2060 (branch, len(bheads)))
2057
2061
2058 parent = repo.dirstate.parents()[0]
2062 parent = repo.dirstate.parents()[0]
2059 if len(bheads) == 1:
2063 if len(bheads) == 1:
2060 if len(repo.heads()) > 1:
2064 if len(repo.heads()) > 1:
2061 raise util.Abort(_("branch '%s' has one head - "
2065 raise util.Abort(_("branch '%s' has one head - "
2062 "please merge with an explicit rev") %
2066 "please merge with an explicit rev") %
2063 branch)
2067 branch)
2064 msg = _('there is nothing to merge')
2068 msg = _('there is nothing to merge')
2065 if parent != repo.lookup(repo[None].branch()):
2069 if parent != repo.lookup(repo[None].branch()):
2066 msg = _('%s - use "hg update" instead') % msg
2070 msg = _('%s - use "hg update" instead') % msg
2067 raise util.Abort(msg)
2071 raise util.Abort(msg)
2068
2072
2069 if parent not in bheads:
2073 if parent not in bheads:
2070 raise util.Abort(_('working dir not at a head rev - '
2074 raise util.Abort(_('working dir not at a head rev - '
2071 'use "hg update" or merge with an explicit rev'))
2075 'use "hg update" or merge with an explicit rev'))
2072 node = parent == bheads[0] and bheads[-1] or bheads[0]
2076 node = parent == bheads[0] and bheads[-1] or bheads[0]
2073
2077
2074 if opts.get('show'):
2078 if opts.get('show'):
2075 p1 = repo['.']
2079 p1 = repo['.']
2076 p2 = repo[node]
2080 p2 = repo[node]
2077 common = p1.ancestor(p2)
2081 common = p1.ancestor(p2)
2078 roots, heads = [common.node()], [p2.node()]
2082 roots, heads = [common.node()], [p2.node()]
2079 displayer = cmdutil.show_changeset(ui, repo, opts)
2083 displayer = cmdutil.show_changeset(ui, repo, opts)
2080 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2084 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2081 displayer.show(repo[node])
2085 displayer.show(repo[node])
2082 return 0
2086 return 0
2083
2087
2084 return hg.merge(repo, node, force=opts.get('force'))
2088 return hg.merge(repo, node, force=opts.get('force'))
2085
2089
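Editorial note: with exactly two branch heads, merge() above picks "the other one" using the pre-ternary and/or idiom: parent == bheads[0] and bheads[-1] or bheads[0]. The same selection written out explicitly (other_head is a hypothetical helper):

def other_head(parent, bheads):
    # equivalent to: parent == bheads[0] and bheads[-1] or bheads[0]
    return bheads[-1] if parent == bheads[0] else bheads[0]

print(other_head("p1", ["p1", "p2"]))   # p2
print(other_head("p2", ["p1", "p2"]))   # p1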
2086 def outgoing(ui, repo, dest=None, **opts):
2090 def outgoing(ui, repo, dest=None, **opts):
2087 """show changesets not found in destination
2091 """show changesets not found in destination
2088
2092
2089 Show changesets not found in the specified destination repository
2093 Show changesets not found in the specified destination repository
2090 or the default push location. These are the changesets that would
2094 or the default push location. These are the changesets that would
2091 be pushed if a push was requested.
2095 be pushed if a push was requested.
2092
2096
2093 See pull for valid destination format details.
2097 See pull for valid destination format details.
2094 """
2098 """
2095 limit = cmdutil.loglimit(opts)
2099 limit = cmdutil.loglimit(opts)
2096 dest, revs, checkout = hg.parseurl(
2100 dest, revs, checkout = hg.parseurl(
2097 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2101 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2098 if revs:
2102 if revs:
2099 revs = [repo.lookup(rev) for rev in revs]
2103 revs = [repo.lookup(rev) for rev in revs]
2100
2104
2101 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2105 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2102 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2106 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2103 o = repo.findoutgoing(other, force=opts.get('force'))
2107 o = repo.findoutgoing(other, force=opts.get('force'))
2104 if not o:
2108 if not o:
2105 ui.status(_("no changes found\n"))
2109 ui.status(_("no changes found\n"))
2106 return 1
2110 return 1
2107 o = repo.changelog.nodesbetween(o, revs)[0]
2111 o = repo.changelog.nodesbetween(o, revs)[0]
2108 if opts.get('newest_first'):
2112 if opts.get('newest_first'):
2109 o.reverse()
2113 o.reverse()
2110 displayer = cmdutil.show_changeset(ui, repo, opts)
2114 displayer = cmdutil.show_changeset(ui, repo, opts)
2111 count = 0
2115 count = 0
2112 for n in o:
2116 for n in o:
2113 if count >= limit:
2117 if count >= limit:
2114 break
2118 break
2115 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2119 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2116 if opts.get('no_merges') and len(parents) == 2:
2120 if opts.get('no_merges') and len(parents) == 2:
2117 continue
2121 continue
2118 count += 1
2122 count += 1
2119 displayer.show(repo[n])
2123 displayer.show(repo[n])
2120
2124
2121 def parents(ui, repo, file_=None, **opts):
2125 def parents(ui, repo, file_=None, **opts):
2122 """show the parents of the working directory or revision
2126 """show the parents of the working directory or revision
2123
2127
2124 Print the working directory's parent revisions. If a revision is
2128 Print the working directory's parent revisions. If a revision is
2125 given via -r/--rev, the parent of that revision will be printed.
2129 given via -r/--rev, the parent of that revision will be printed.
2126 If a file argument is given, the revision in which the file was last
2130 If a file argument is given, the revision in which the file was last
2127 changed (before the working directory revision or the argument to
2131 changed (before the working directory revision or the argument to
2128 --rev if given) is printed.
2132 --rev if given) is printed.
2129 """
2133 """
2130 rev = opts.get('rev')
2134 rev = opts.get('rev')
2131 if rev:
2135 if rev:
2132 ctx = repo[rev]
2136 ctx = repo[rev]
2133 else:
2137 else:
2134 ctx = repo[None]
2138 ctx = repo[None]
2135
2139
2136 if file_:
2140 if file_:
2137 m = cmdutil.match(repo, (file_,), opts)
2141 m = cmdutil.match(repo, (file_,), opts)
2138 if m.anypats() or len(m.files()) != 1:
2142 if m.anypats() or len(m.files()) != 1:
2139 raise util.Abort(_('can only specify an explicit file name'))
2143 raise util.Abort(_('can only specify an explicit file name'))
2140 file_ = m.files()[0]
2144 file_ = m.files()[0]
2141 filenodes = []
2145 filenodes = []
2142 for cp in ctx.parents():
2146 for cp in ctx.parents():
2143 if not cp:
2147 if not cp:
2144 continue
2148 continue
2145 try:
2149 try:
2146 filenodes.append(cp.filenode(file_))
2150 filenodes.append(cp.filenode(file_))
2147 except error.LookupError:
2151 except error.LookupError:
2148 pass
2152 pass
2149 if not filenodes:
2153 if not filenodes:
2150 raise util.Abort(_("'%s' not found in manifest!") % file_)
2154 raise util.Abort(_("'%s' not found in manifest!") % file_)
2151 fl = repo.file(file_)
2155 fl = repo.file(file_)
2152 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2156 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2153 else:
2157 else:
2154 p = [cp.node() for cp in ctx.parents()]
2158 p = [cp.node() for cp in ctx.parents()]
2155
2159
2156 displayer = cmdutil.show_changeset(ui, repo, opts)
2160 displayer = cmdutil.show_changeset(ui, repo, opts)
2157 for n in p:
2161 for n in p:
2158 if n != nullid:
2162 if n != nullid:
2159 displayer.show(repo[n])
2163 displayer.show(repo[n])
2160
2164
2161 def paths(ui, repo, search=None):
2165 def paths(ui, repo, search=None):
2162 """show aliases for remote repositories
2166 """show aliases for remote repositories
2163
2167
2164 Show definition of symbolic path name NAME. If no name is given,
2168 Show definition of symbolic path name NAME. If no name is given,
2165 show definition of available names.
2169 show definition of available names.
2166
2170
2167 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2171 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2168 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2172 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2169
2173
2170 See 'hg help urls' for more information.
2174 See 'hg help urls' for more information.
2171 """
2175 """
2172 if search:
2176 if search:
2173 for name, path in ui.configitems("paths"):
2177 for name, path in ui.configitems("paths"):
2174 if name == search:
2178 if name == search:
2175 ui.write("%s\n" % url.hidepassword(path))
2179 ui.write("%s\n" % url.hidepassword(path))
2176 return
2180 return
2177 ui.warn(_("not found!\n"))
2181 ui.warn(_("not found!\n"))
2178 return 1
2182 return 1
2179 else:
2183 else:
2180 for name, path in ui.configitems("paths"):
2184 for name, path in ui.configitems("paths"):
2181 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2185 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2182
2186
2183 def postincoming(ui, repo, modheads, optupdate, checkout):
2187 def postincoming(ui, repo, modheads, optupdate, checkout):
2184 if modheads == 0:
2188 if modheads == 0:
2185 return
2189 return
2186 if optupdate:
2190 if optupdate:
2187 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2191 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2188 return hg.update(repo, checkout)
2192 return hg.update(repo, checkout)
2189 else:
2193 else:
2190 ui.status(_("not updating, since new heads added\n"))
2194 ui.status(_("not updating, since new heads added\n"))
2191 if modheads > 1:
2195 if modheads > 1:
2192 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2196 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2193 else:
2197 else:
2194 ui.status(_("(run 'hg update' to get a working copy)\n"))
2198 ui.status(_("(run 'hg update' to get a working copy)\n"))
2195
2199
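Editorial note: postincoming() above updates the working directory only when the pull did not leave extra heads (or an explicit checkout was requested); otherwise it prints a hint about merging or updating. A compact, slightly simplified sketch of that decision, returning the message rather than writing through ui (all names hypothetical):

def after_pull(modheads, want_update, checkout=None, branchheads=1):
    # simplified mirror of postincoming(): update, or say what to run next
    if modheads == 0:
        return "no changes"
    if want_update and (modheads <= 1 or branchheads == 1 or checkout):
        return "update to %s" % (checkout or "tip")
    if modheads > 1:
        return "run 'hg heads' to see heads, 'hg merge' to merge"
    return "run 'hg update' to get a working copy"

print(after_pull(1, True))                   # update to tip
print(after_pull(2, True, branchheads=2))    # suggests heads/merge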
2196 def pull(ui, repo, source="default", **opts):
2200 def pull(ui, repo, source="default", **opts):
2197 """pull changes from the specified source
2201 """pull changes from the specified source
2198
2202
2199 Pull changes from a remote repository to the local one.
2203 Pull changes from a remote repository to the local one.
2200
2204
2201 This finds all changes from the repository at the specified path
2205 This finds all changes from the repository at the specified path
2202 or URL and adds them to the local repository. By default, this
2206 or URL and adds them to the local repository. By default, this
2203 does not update the copy of the project in the working directory.
2207 does not update the copy of the project in the working directory.
2204
2208
2205 Use hg incoming if you want to see what will be added by the next
2209 Use hg incoming if you want to see what will be added by the next
2206 pull without actually adding the changes to the repository.
2210 pull without actually adding the changes to the repository.
2207
2211
2208 If SOURCE is omitted, the 'default' path will be used.
2212 If SOURCE is omitted, the 'default' path will be used.
2209 See 'hg help urls' for more information.
2213 See 'hg help urls' for more information.
2210 """
2214 """
2211 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2215 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2212 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2216 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2213 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2217 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2214 if revs:
2218 if revs:
2215 try:
2219 try:
2216 revs = [other.lookup(rev) for rev in revs]
2220 revs = [other.lookup(rev) for rev in revs]
2217 except error.CapabilityError:
2221 except error.CapabilityError:
2218 err = _("Other repository doesn't support revision lookup, "
2222 err = _("Other repository doesn't support revision lookup, "
2219 "so a rev cannot be specified.")
2223 "so a rev cannot be specified.")
2220 raise util.Abort(err)
2224 raise util.Abort(err)
2221
2225
2222 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2226 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2223 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2227 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2224
2228
2225 def push(ui, repo, dest=None, **opts):
2229 def push(ui, repo, dest=None, **opts):
2226 """push changes to the specified destination
2230 """push changes to the specified destination
2227
2231
2228 Push changes from the local repository to the given destination.
2232 Push changes from the local repository to the given destination.
2229
2233
2230 This is the symmetrical operation for pull. It moves changes from
2234 This is the symmetrical operation for pull. It moves changes from
2231 the current repository to a different one. If the destination is
2235 the current repository to a different one. If the destination is
2232 local this is identical to a pull in that directory from the
2236 local this is identical to a pull in that directory from the
2233 current one.
2237 current one.
2234
2238
2235 By default, push will refuse to run if it detects the result would
2239 By default, push will refuse to run if it detects the result would
2236 increase the number of remote heads. This generally indicates the
2240 increase the number of remote heads. This generally indicates the
2237 client has forgotten to pull and merge before pushing.
2241 client has forgotten to pull and merge before pushing.
2238
2242
2239 If -r/--rev is used, the named revision and all its ancestors will
2243 If -r/--rev is used, the named revision and all its ancestors will
2240 be pushed to the remote repository.
2244 be pushed to the remote repository.
2241
2245
2242 Look at the help text for URLs for important details about ssh://
2246 Look at the help text for URLs for important details about ssh://
2243 URLs. If DESTINATION is omitted, a default path will be used.
2247 URLs. If DESTINATION is omitted, a default path will be used.
2244 See 'hg help urls' for more information.
2248 See 'hg help urls' for more information.
2245 """
2249 """
2246 dest, revs, checkout = hg.parseurl(
2250 dest, revs, checkout = hg.parseurl(
2247 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2251 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2248 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2252 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2249 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2253 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2250 if revs:
2254 if revs:
2251 revs = [repo.lookup(rev) for rev in revs]
2255 revs = [repo.lookup(rev) for rev in revs]
2252 r = repo.push(other, opts.get('force'), revs=revs)
2256 r = repo.push(other, opts.get('force'), revs=revs)
2253 return r == 0
2257 return r == 0
2254
2258
2255 def recover(ui, repo):
2259 def recover(ui, repo):
2256 """roll back an interrupted transaction
2260 """roll back an interrupted transaction
2257
2261
2258 Recover from an interrupted commit or pull.
2262 Recover from an interrupted commit or pull.
2259
2263
2260 This command tries to fix the repository status after an
2264 This command tries to fix the repository status after an
2261 interrupted operation. It should only be necessary when Mercurial
2265 interrupted operation. It should only be necessary when Mercurial
2262 suggests it.
2266 suggests it.
2263 """
2267 """
2264 if repo.recover():
2268 if repo.recover():
2265 return hg.verify(repo)
2269 return hg.verify(repo)
2266 return 1
2270 return 1
2267
2271
2268 def remove(ui, repo, *pats, **opts):
2272 def remove(ui, repo, *pats, **opts):
2269 """remove the specified files on the next commit
2273 """remove the specified files on the next commit
2270
2274
2271 Schedule the indicated files for removal from the repository.
2275 Schedule the indicated files for removal from the repository.
2272
2276
2273 This only removes files from the current branch, not from the
2277 This only removes files from the current branch, not from the
2274 entire project history. -A/--after can be used to remove only
2278 entire project history. -A/--after can be used to remove only
2275 files that have already been deleted, -f/--force can be used to
2279 files that have already been deleted, -f/--force can be used to
2276 force deletion, and -Af can be used to remove files from the next
2280 force deletion, and -Af can be used to remove files from the next
2277 revision without deleting them.
2281 revision without deleting them.
2278
2282
2279 The following table details the behavior of remove for different
2283 The following table details the behavior of remove for different
2280 file states (columns) and option combinations (rows). The file
2284 file states (columns) and option combinations (rows). The file
2281 states are Added, Clean, Modified and Missing (as reported by hg
2285 states are Added, Clean, Modified and Missing (as reported by hg
2282 status). The actions are Warn, Remove (from branch) and Delete
2286 status). The actions are Warn, Remove (from branch) and Delete
2283 (from disk).
2287 (from disk).
2284
2288
2285 A C M !
2289 A C M !
2286 none W RD W R
2290 none W RD W R
2287 -f R RD RD R
2291 -f R RD RD R
2288 -A W W W R
2292 -A W W W R
2289 -Af R R R R
2293 -Af R R R R
2290
2294
2291 This command schedules the files to be removed at the next commit.
2295 This command schedules the files to be removed at the next commit.
2292 To undo a remove before that, see hg revert.
2296 To undo a remove before that, see hg revert.
2293 """
2297 """
2294
2298
2295 after, force = opts.get('after'), opts.get('force')
2299 after, force = opts.get('after'), opts.get('force')
2296 if not pats and not after:
2300 if not pats and not after:
2297 raise util.Abort(_('no files specified'))
2301 raise util.Abort(_('no files specified'))
2298
2302
2299 m = cmdutil.match(repo, pats, opts)
2303 m = cmdutil.match(repo, pats, opts)
2300 s = repo.status(match=m, clean=True)
2304 s = repo.status(match=m, clean=True)
2301 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2305 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2302
2306
2303 def warn(files, reason):
2307 def warn(files, reason):
2304 for f in files:
2308 for f in files:
2305 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2309 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2306 % (m.rel(f), reason))
2310 % (m.rel(f), reason))
2307
2311
2308 if force:
2312 if force:
2309 remove, forget = modified + deleted + clean, added
2313 remove, forget = modified + deleted + clean, added
2310 elif after:
2314 elif after:
2311 remove, forget = deleted, []
2315 remove, forget = deleted, []
2312 warn(modified + added + clean, _('still exists'))
2316 warn(modified + added + clean, _('still exists'))
2313 else:
2317 else:
2314 remove, forget = deleted + clean, []
2318 remove, forget = deleted + clean, []
2315 warn(modified, _('is modified'))
2319 warn(modified, _('is modified'))
2316 warn(added, _('has been marked for add'))
2320 warn(added, _('has been marked for add'))
2317
2321
2318 for f in sorted(remove + forget):
2322 for f in sorted(remove + forget):
2319 if ui.verbose or not m.exact(f):
2323 if ui.verbose or not m.exact(f):
2320 ui.status(_('removing %s\n') % m.rel(f))
2324 ui.status(_('removing %s\n') % m.rel(f))
2321
2325
2322 repo.forget(forget)
2326 repo.forget(forget)
2323 repo.remove(remove, unlink=not after)
2327 repo.remove(remove, unlink=not after)
2324
2328
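Editorial note: the state/option table in the remove docstring is exactly what the force/after branches above implement. Restated as a data table for readability (a sketch; W = warn, R = remove from branch, D = delete from disk):

# rows: option combination, columns: Added, Clean, Modified, Missing (!)
ACTIONS = {
    "none": {"A": "W", "C": "RD", "M": "W",  "!": "R"},
    "-f":   {"A": "R", "C": "RD", "M": "RD", "!": "R"},
    "-A":   {"A": "W", "C": "W",  "M": "W",  "!": "R"},
    "-Af":  {"A": "R", "C": "R",  "M": "R",  "!": "R"},
}

print(ACTIONS["none"]["C"])   # RD: removed from the branch and deleted
print(ACTIONS["-Af"]["M"])    # R: dropped from the next revision, file kept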
2325 def rename(ui, repo, *pats, **opts):
2329 def rename(ui, repo, *pats, **opts):
2326 """rename files; equivalent of copy + remove
2330 """rename files; equivalent of copy + remove
2327
2331
2328 Mark dest as copies of sources; mark sources for deletion. If dest
2332 Mark dest as copies of sources; mark sources for deletion. If dest
2329 is a directory, copies are put in that directory. If dest is a
2333 is a directory, copies are put in that directory. If dest is a
2330 file, there can only be one source.
2334 file, there can only be one source.
2331
2335
2332 By default, this command copies the contents of files as they
2336 By default, this command copies the contents of files as they
2333 exist in the working directory. If invoked with -A/--after, the
2337 exist in the working directory. If invoked with -A/--after, the
2334 operation is recorded, but no copying is performed.
2338 operation is recorded, but no copying is performed.
2335
2339
2336 This command takes effect at the next commit. To undo a rename
2340 This command takes effect at the next commit. To undo a rename
2337 before that, see hg revert.
2341 before that, see hg revert.
2338 """
2342 """
2339 wlock = repo.wlock(False)
2343 wlock = repo.wlock(False)
2340 try:
2344 try:
2341 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2345 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2342 finally:
2346 finally:
2343 wlock.release()
2347 wlock.release()
2344
2348
2345 def resolve(ui, repo, *pats, **opts):
2349 def resolve(ui, repo, *pats, **opts):
2346 """retry file merges from a merge or update
2350 """retry file merges from a merge or update
2347
2351
2348 This command will cleanly retry unresolved file merges using file
2352 This command will cleanly retry unresolved file merges using file
2349 revisions preserved from the last update or merge. To attempt to
2353 revisions preserved from the last update or merge. To attempt to
2350 resolve all unresolved files, use the -a/--all switch.
2354 resolve all unresolved files, use the -a/--all switch.
2351
2355
2352 If a conflict is resolved manually, please note that the changes
2356 If a conflict is resolved manually, please note that the changes
2353 will be overwritten if the merge is retried with resolve. The
2357 will be overwritten if the merge is retried with resolve. The
2354 -m/--mark switch should be used to mark the file as resolved.
2358 -m/--mark switch should be used to mark the file as resolved.
2355
2359
2356 This command will also allow listing resolved files and manually
2360 This command will also allow listing resolved files and manually
2357 marking and unmarking files as resolved. All files must be marked
2361 marking and unmarking files as resolved. All files must be marked
2358 as resolved before new commits are permitted.
2362 as resolved before new commits are permitted.
2359
2363
2360 The codes used to show the status of files are:
2364 The codes used to show the status of files are:
2361 U = unresolved
2365 U = unresolved
2362 R = resolved
2366 R = resolved
2363 """
2367 """
2364
2368
2365 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2369 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2366
2370
2367 if (show and (mark or unmark)) or (mark and unmark):
2371 if (show and (mark or unmark)) or (mark and unmark):
2368 raise util.Abort(_("too many options specified"))
2372 raise util.Abort(_("too many options specified"))
2369 if pats and all:
2373 if pats and all:
2370 raise util.Abort(_("can't specify --all and patterns"))
2374 raise util.Abort(_("can't specify --all and patterns"))
2371 if not (all or pats or show or mark or unmark):
2375 if not (all or pats or show or mark or unmark):
2372 raise util.Abort(_('no files or directories specified; '
2376 raise util.Abort(_('no files or directories specified; '
2373 'use --all to remerge all files'))
2377 'use --all to remerge all files'))
2374
2378
2375 ms = merge_.mergestate(repo)
2379 ms = merge_.mergestate(repo)
2376 m = cmdutil.match(repo, pats, opts)
2380 m = cmdutil.match(repo, pats, opts)
2377
2381
2378 for f in ms:
2382 for f in ms:
2379 if m(f):
2383 if m(f):
2380 if show:
2384 if show:
2381 ui.write("%s %s\n" % (ms[f].upper(), f))
2385 ui.write("%s %s\n" % (ms[f].upper(), f))
2382 elif mark:
2386 elif mark:
2383 ms.mark(f, "r")
2387 ms.mark(f, "r")
2384 elif unmark:
2388 elif unmark:
2385 ms.mark(f, "u")
2389 ms.mark(f, "u")
2386 else:
2390 else:
2387 wctx = repo[None]
2391 wctx = repo[None]
2388 mctx = wctx.parents()[-1]
2392 mctx = wctx.parents()[-1]
2389
2393
2390 # backup pre-resolve (merge uses .orig for its own purposes)
2394 # backup pre-resolve (merge uses .orig for its own purposes)
2391 a = repo.wjoin(f)
2395 a = repo.wjoin(f)
2392 util.copyfile(a, a + ".resolve")
2396 util.copyfile(a, a + ".resolve")
2393
2397
2394 # resolve file
2398 # resolve file
2395 ms.resolve(f, wctx, mctx)
2399 ms.resolve(f, wctx, mctx)
2396
2400
2397 # replace filemerge's .orig file with our resolve file
2401 # replace filemerge's .orig file with our resolve file
2398 util.rename(a + ".resolve", a + ".orig")
2402 util.rename(a + ".resolve", a + ".orig")
2399
2403
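Editorial note: resolve() above copies the working file to .resolve before re-running the file merge (which writes its own .orig), then renames that copy over .orig so the user's pre-retry contents survive as the backup. A generic sketch of that backup-around-an-operation pattern (retry_with_backup is hypothetical):

import os
import shutil

def retry_with_backup(path, operation, suffix=".resolve"):
    # keep the pre-retry contents: copy aside, run the operation (which may
    # rewrite both the file and path + ".orig"), then promote our copy to .orig
    backup = path + suffix
    shutil.copyfile(path, backup)
    operation(path)
    os.rename(backup, path + ".orig")

# usage sketch (assumes "conflicted.txt" exists in the current directory):
# retry_with_backup("conflicted.txt", lambda p: open(p, "w").write("merged"))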
2400 def revert(ui, repo, *pats, **opts):
2404 def revert(ui, repo, *pats, **opts):
2401 """restore individual files or directories to an earlier state
2405 """restore individual files or directories to an earlier state
2402
2406
2403 (Use update -r to check out earlier revisions, revert does not
2407 (Use update -r to check out earlier revisions, revert does not
2404 change the working directory parents.)
2408 change the working directory parents.)
2405
2409
2406 With no revision specified, revert the named files or directories
2410 With no revision specified, revert the named files or directories
2407 to the contents they had in the parent of the working directory.
2411 to the contents they had in the parent of the working directory.
2408 This restores the contents of the affected files to an unmodified
2412 This restores the contents of the affected files to an unmodified
2409 state and unschedules adds, removes, copies, and renames. If the
2413 state and unschedules adds, removes, copies, and renames. If the
2410 working directory has two parents, you must explicitly specify the
2414 working directory has two parents, you must explicitly specify the
2411 revision to revert to.
2415 revision to revert to.
2412
2416
2413 Using the -r/--rev option, revert the given files or directories
2417 Using the -r/--rev option, revert the given files or directories
2414 to their contents as of a specific revision. This can be helpful
2418 to their contents as of a specific revision. This can be helpful
2415 to "roll back" some or all of an earlier change. See 'hg help
2419 to "roll back" some or all of an earlier change. See 'hg help
2416 dates' for a list of formats valid for -d/--date.
2420 dates' for a list of formats valid for -d/--date.
2417
2421
2418 Revert modifies the working directory. It does not commit any
2422 Revert modifies the working directory. It does not commit any
2419 changes, or change the parent of the working directory. If you
2423 changes, or change the parent of the working directory. If you
2420 revert to a revision other than the parent of the working
2424 revert to a revision other than the parent of the working
2421 directory, the reverted files will thus appear modified
2425 directory, the reverted files will thus appear modified
2422 afterwards.
2426 afterwards.
2423
2427
2424 If a file has been deleted, it is restored. If the executable mode
2428 If a file has been deleted, it is restored. If the executable mode
2425 of a file was changed, it is reset.
2429 of a file was changed, it is reset.
2426
2430
2427 If names are given, all files matching the names are reverted.
2431 If names are given, all files matching the names are reverted.
2428 If no arguments are given, no files are reverted.
2432 If no arguments are given, no files are reverted.
2429
2433
2430 Modified files are saved with a .orig suffix before reverting.
2434 Modified files are saved with a .orig suffix before reverting.
2431 To disable these backups, use --no-backup.
2435 To disable these backups, use --no-backup.
2432 """
2436 """
2433
2437
2434 if opts["date"]:
2438 if opts["date"]:
2435 if opts["rev"]:
2439 if opts["rev"]:
2436 raise util.Abort(_("you can't specify a revision and a date"))
2440 raise util.Abort(_("you can't specify a revision and a date"))
2437 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2441 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2438
2442
2439 if not pats and not opts.get('all'):
2443 if not pats and not opts.get('all'):
2440 raise util.Abort(_('no files or directories specified; '
2444 raise util.Abort(_('no files or directories specified; '
2441 'use --all to revert the whole repo'))
2445 'use --all to revert the whole repo'))
2442
2446
2443 parent, p2 = repo.dirstate.parents()
2447 parent, p2 = repo.dirstate.parents()
2444 if not opts.get('rev') and p2 != nullid:
2448 if not opts.get('rev') and p2 != nullid:
2445 raise util.Abort(_('uncommitted merge - please provide a '
2449 raise util.Abort(_('uncommitted merge - please provide a '
2446 'specific revision'))
2450 'specific revision'))
2447 ctx = repo[opts.get('rev')]
2451 ctx = repo[opts.get('rev')]
2448 node = ctx.node()
2452 node = ctx.node()
2449 mf = ctx.manifest()
2453 mf = ctx.manifest()
2450 if node == parent:
2454 if node == parent:
2451 pmf = mf
2455 pmf = mf
2452 else:
2456 else:
2453 pmf = None
2457 pmf = None
2454
2458
2455 # need all matching names in dirstate and manifest of target rev,
2459 # need all matching names in dirstate and manifest of target rev,
2456 # so have to walk both. do not print errors if files exist in one
2460 # so have to walk both. do not print errors if files exist in one
2457 # but not the other.
2461 # but not the other.
2458
2462
2459 names = {}
2463 names = {}
2460
2464
2461 wlock = repo.wlock()
2465 wlock = repo.wlock()
2462 try:
2466 try:
2463 # walk dirstate.
2467 # walk dirstate.
2464
2468
2465 m = cmdutil.match(repo, pats, opts)
2469 m = cmdutil.match(repo, pats, opts)
2466 m.bad = lambda x,y: False
2470 m.bad = lambda x,y: False
2467 for abs in repo.walk(m):
2471 for abs in repo.walk(m):
2468 names[abs] = m.rel(abs), m.exact(abs)
2472 names[abs] = m.rel(abs), m.exact(abs)
2469
2473
2470 # walk target manifest.
2474 # walk target manifest.
2471
2475
2472 def badfn(path, msg):
2476 def badfn(path, msg):
2473 if path in names:
2477 if path in names:
2474 return False
2478 return False
2475 path_ = path + '/'
2479 path_ = path + '/'
2476 for f in names:
2480 for f in names:
2477 if f.startswith(path_):
2481 if f.startswith(path_):
2478 return False
2482 return False
2479 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2483 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2480 return False
2484 return False
2481
2485
2482 m = cmdutil.match(repo, pats, opts)
2486 m = cmdutil.match(repo, pats, opts)
2483 m.bad = badfn
2487 m.bad = badfn
2484 for abs in repo[node].walk(m):
2488 for abs in repo[node].walk(m):
2485 if abs not in names:
2489 if abs not in names:
2486 names[abs] = m.rel(abs), m.exact(abs)
2490 names[abs] = m.rel(abs), m.exact(abs)
2487
2491
2488 m = cmdutil.matchfiles(repo, names)
2492 m = cmdutil.matchfiles(repo, names)
2489 changes = repo.status(match=m)[:4]
2493 changes = repo.status(match=m)[:4]
2490 modified, added, removed, deleted = map(set, changes)
2494 modified, added, removed, deleted = map(set, changes)
2491
2495
2492 # if f is a rename, also revert the source
2496 # if f is a rename, also revert the source
2493 cwd = repo.getcwd()
2497 cwd = repo.getcwd()
2494 for f in added:
2498 for f in added:
2495 src = repo.dirstate.copied(f)
2499 src = repo.dirstate.copied(f)
2496 if src and src not in names and repo.dirstate[src] == 'r':
2500 if src and src not in names and repo.dirstate[src] == 'r':
2497 removed.add(src)
2501 removed.add(src)
2498 names[src] = (repo.pathto(src, cwd), True)
2502 names[src] = (repo.pathto(src, cwd), True)
2499
2503
2500 def removeforget(abs):
2504 def removeforget(abs):
2501 if repo.dirstate[abs] == 'a':
2505 if repo.dirstate[abs] == 'a':
2502 return _('forgetting %s\n')
2506 return _('forgetting %s\n')
2503 return _('removing %s\n')
2507 return _('removing %s\n')
2504
2508
2505 revert = ([], _('reverting %s\n'))
2509 revert = ([], _('reverting %s\n'))
2506 add = ([], _('adding %s\n'))
2510 add = ([], _('adding %s\n'))
2507 remove = ([], removeforget)
2511 remove = ([], removeforget)
2508 undelete = ([], _('undeleting %s\n'))
2512 undelete = ([], _('undeleting %s\n'))
2509
2513
2510 disptable = (
2514 disptable = (
2511 # dispatch table:
2515 # dispatch table:
2512 # file state
2516 # file state
2513 # action if in target manifest
2517 # action if in target manifest
2514 # action if not in target manifest
2518 # action if not in target manifest
2515 # make backup if in target manifest
2519 # make backup if in target manifest
2516 # make backup if not in target manifest
2520 # make backup if not in target manifest
2517 (modified, revert, remove, True, True),
2521 (modified, revert, remove, True, True),
2518 (added, revert, remove, True, False),
2522 (added, revert, remove, True, False),
2519 (removed, undelete, None, False, False),
2523 (removed, undelete, None, False, False),
2520 (deleted, revert, remove, False, False),
2524 (deleted, revert, remove, False, False),
2521 )
2525 )
2522
2526
2523 for abs, (rel, exact) in sorted(names.items()):
2527 for abs, (rel, exact) in sorted(names.items()):
2524 mfentry = mf.get(abs)
2528 mfentry = mf.get(abs)
2525 target = repo.wjoin(abs)
2529 target = repo.wjoin(abs)
2526 def handle(xlist, dobackup):
2530 def handle(xlist, dobackup):
2527 xlist[0].append(abs)
2531 xlist[0].append(abs)
2528 if dobackup and not opts.get('no_backup') and util.lexists(target):
2532 if dobackup and not opts.get('no_backup') and util.lexists(target):
2529 bakname = "%s.orig" % rel
2533 bakname = "%s.orig" % rel
2530 ui.note(_('saving current version of %s as %s\n') %
2534 ui.note(_('saving current version of %s as %s\n') %
2531 (rel, bakname))
2535 (rel, bakname))
2532 if not opts.get('dry_run'):
2536 if not opts.get('dry_run'):
2533 util.copyfile(target, bakname)
2537 util.copyfile(target, bakname)
2534 if ui.verbose or not exact:
2538 if ui.verbose or not exact:
2535 msg = xlist[1]
2539 msg = xlist[1]
2536 if not isinstance(msg, basestring):
2540 if not isinstance(msg, basestring):
2537 msg = msg(abs)
2541 msg = msg(abs)
2538 ui.status(msg % rel)
2542 ui.status(msg % rel)
2539 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2543 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2540 if abs not in table: continue
2544 if abs not in table: continue
2541 # file has changed in dirstate
2545 # file has changed in dirstate
2542 if mfentry:
2546 if mfentry:
2543 handle(hitlist, backuphit)
2547 handle(hitlist, backuphit)
2544 elif misslist is not None:
2548 elif misslist is not None:
2545 handle(misslist, backupmiss)
2549 handle(misslist, backupmiss)
2546 break
2550 break
2547 else:
2551 else:
2548 if abs not in repo.dirstate:
2552 if abs not in repo.dirstate:
2549 if mfentry:
2553 if mfentry:
2550 handle(add, True)
2554 handle(add, True)
2551 elif exact:
2555 elif exact:
2552 ui.warn(_('file not managed: %s\n') % rel)
2556 ui.warn(_('file not managed: %s\n') % rel)
2553 continue
2557 continue
2554 # file has not changed in dirstate
2558 # file has not changed in dirstate
2555 if node == parent:
2559 if node == parent:
2556 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2560 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2557 continue
2561 continue
2558 if pmf is None:
2562 if pmf is None:
2559 # only need parent manifest in this unlikely case,
2563 # only need parent manifest in this unlikely case,
2560 # so do not read by default
2564 # so do not read by default
2561 pmf = repo[parent].manifest()
2565 pmf = repo[parent].manifest()
2562 if abs in pmf:
2566 if abs in pmf:
2563 if mfentry:
2567 if mfentry:
2564 # if version of file is same in parent and target
2568 # if version of file is same in parent and target
2565 # manifests, do nothing
2569 # manifests, do nothing
2566 if (pmf[abs] != mfentry or
2570 if (pmf[abs] != mfentry or
2567 pmf.flags(abs) != mf.flags(abs)):
2571 pmf.flags(abs) != mf.flags(abs)):
2568 handle(revert, False)
2572 handle(revert, False)
2569 else:
2573 else:
2570 handle(remove, False)
2574 handle(remove, False)
2571
2575
2572 if not opts.get('dry_run'):
2576 if not opts.get('dry_run'):
2573 def checkout(f):
2577 def checkout(f):
2574 fc = ctx[f]
2578 fc = ctx[f]
2575 repo.wwrite(f, fc.data(), fc.flags())
2579 repo.wwrite(f, fc.data(), fc.flags())
2576
2580
2577 audit_path = util.path_auditor(repo.root)
2581 audit_path = util.path_auditor(repo.root)
2578 for f in remove[0]:
2582 for f in remove[0]:
2579 if repo.dirstate[f] == 'a':
2583 if repo.dirstate[f] == 'a':
2580 repo.dirstate.forget(f)
2584 repo.dirstate.forget(f)
2581 continue
2585 continue
2582 audit_path(f)
2586 audit_path(f)
2583 try:
2587 try:
2584 util.unlink(repo.wjoin(f))
2588 util.unlink(repo.wjoin(f))
2585 except OSError:
2589 except OSError:
2586 pass
2590 pass
2587 repo.dirstate.remove(f)
2591 repo.dirstate.remove(f)
2588
2592
2589 normal = None
2593 normal = None
2590 if node == parent:
2594 if node == parent:
2591 # We're reverting to our parent. If possible, we'd like status
2595 # We're reverting to our parent. If possible, we'd like status
2592 # to report the file as clean. We have to use normallookup for
2596 # to report the file as clean. We have to use normallookup for
2593 # merges to avoid losing information about merged/dirty files.
2597 # merges to avoid losing information about merged/dirty files.
2594 if p2 != nullid:
2598 if p2 != nullid:
2595 normal = repo.dirstate.normallookup
2599 normal = repo.dirstate.normallookup
2596 else:
2600 else:
2597 normal = repo.dirstate.normal
2601 normal = repo.dirstate.normal
2598 for f in revert[0]:
2602 for f in revert[0]:
2599 checkout(f)
2603 checkout(f)
2600 if normal:
2604 if normal:
2601 normal(f)
2605 normal(f)
2602
2606
2603 for f in add[0]:
2607 for f in add[0]:
2604 checkout(f)
2608 checkout(f)
2605 repo.dirstate.add(f)
2609 repo.dirstate.add(f)
2606
2610
2607 normal = repo.dirstate.normallookup
2611 normal = repo.dirstate.normallookup
2608 if node == parent and p2 == nullid:
2612 if node == parent and p2 == nullid:
2609 normal = repo.dirstate.normal
2613 normal = repo.dirstate.normal
2610 for f in undelete[0]:
2614 for f in undelete[0]:
2611 checkout(f)
2615 checkout(f)
2612 normal(f)
2616 normal(f)
2613
2617
2614 finally:
2618 finally:
2615 wlock.release()
2619 wlock.release()
2616
2620
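# revert() above is driven by a small dispatch table: each file is looked up
# by (status bucket, present in the target manifest?) and mapped to an action
# plus a backup flag. A hedged standalone sketch of that table-driven shape,
# with plain sets for the buckets and strings for the actions:

def classify(f, modified, added, removed, deleted, in_target):
    disptable = (
        # bucket,  action if in target, if not in target, backup-hit, backup-miss
        (modified, 'revert',   'remove', True,  True),
        (added,    'revert',   'remove', True,  False),
        (removed,  'undelete',  None,    False, False),
        (deleted,  'revert',   'remove', False, False),
    )
    for bucket, hit, miss, backuphit, backupmiss in disptable:
        if f not in bucket:
            continue
        if in_target:
            return hit, backuphit
        return miss, backupmiss
    return None, False    # file unchanged in the dirstate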
2617 def rollback(ui, repo):
2621 def rollback(ui, repo):
2618 """roll back the last transaction
2622 """roll back the last transaction
2619
2623
2620 This command should be used with care. There is only one level of
2624 This command should be used with care. There is only one level of
2621 rollback, and there is no way to undo a rollback. It will also
2625 rollback, and there is no way to undo a rollback. It will also
2622 restore the dirstate at the time of the last transaction, losing
2626 restore the dirstate at the time of the last transaction, losing
2623 any dirstate changes since that time.
2627 any dirstate changes since that time.
2624
2628
2625 Transactions are used to encapsulate the effects of all commands
2629 Transactions are used to encapsulate the effects of all commands
2626 that create new changesets or propagate existing changesets into a
2630 that create new changesets or propagate existing changesets into a
2627 repository. For example, the following commands are transactional,
2631 repository. For example, the following commands are transactional,
2628 and their effects can be rolled back:
2632 and their effects can be rolled back:
2629
2633
2630 commit
2634 commit
2631 import
2635 import
2632 pull
2636 pull
2633 push (with this repository as destination)
2637 push (with this repository as destination)
2634 unbundle
2638 unbundle
2635
2639
2636 This command is not intended for use on public repositories. Once
2640 This command is not intended for use on public repositories. Once
2637 changes are visible for pull by other users, rolling a transaction
2641 changes are visible for pull by other users, rolling a transaction
2638 back locally is ineffective (someone else may already have pulled
2642 back locally is ineffective (someone else may already have pulled
2639 the changes). Furthermore, a race is possible with readers of the
2643 the changes). Furthermore, a race is possible with readers of the
2640 repository; for example an in-progress pull from the repository
2644 repository; for example an in-progress pull from the repository
2641 may fail if a rollback is performed.
2645 may fail if a rollback is performed.
2642 """
2646 """
2643 repo.rollback()
2647 repo.rollback()
2644
2648
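# The rollback help above boils down to a single journal slot: one snapshot
# is kept for the last transaction, restoring it consumes it, and a rollback
# cannot itself be undone. A hedged, much-simplified sketch of that idea
# (Mercurial's real journal is file based, not an in-memory copy):

class SingleLevelJournal(object):
    def __init__(self, state):
        self.state = state
        self.journal = None                 # only one level of undo is kept

    def transaction(self, change):
        self.journal = dict(self.state)     # snapshot before the change
        change(self.state)

    def rollback(self):
        if self.journal is None:
            raise RuntimeError('no rollback information available')
        self.state = self.journal
        self.journal = None                 # rolling back cannot be undone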
2645 def root(ui, repo):
2649 def root(ui, repo):
2646 """print the root (top) of the current working directory
2650 """print the root (top) of the current working directory
2647
2651
2648 Print the root directory of the current repository.
2652 Print the root directory of the current repository.
2649 """
2653 """
2650 ui.write(repo.root + "\n")
2654 ui.write(repo.root + "\n")
2651
2655
2652 def serve(ui, repo, **opts):
2656 def serve(ui, repo, **opts):
2653 """export the repository via HTTP
2657 """export the repository via HTTP
2654
2658
2655 Start a local HTTP repository browser and pull server.
2659 Start a local HTTP repository browser and pull server.
2656
2660
2657 By default, the server logs accesses to stdout and errors to
2661 By default, the server logs accesses to stdout and errors to
2658 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2662 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2659 files.
2663 files.
2660 """
2664 """
2661
2665
2662 if opts["stdio"]:
2666 if opts["stdio"]:
2663 if repo is None:
2667 if repo is None:
2664 raise error.RepoError(_("There is no Mercurial repository here"
2668 raise error.RepoError(_("There is no Mercurial repository here"
2665 " (.hg not found)"))
2669 " (.hg not found)"))
2666 s = sshserver.sshserver(ui, repo)
2670 s = sshserver.sshserver(ui, repo)
2667 s.serve_forever()
2671 s.serve_forever()
2668
2672
2669 baseui = repo and repo.baseui or ui
2673 baseui = repo and repo.baseui or ui
2670 optlist = ("name templates style address port prefix ipv6"
2674 optlist = ("name templates style address port prefix ipv6"
2671 " accesslog errorlog webdir_conf certificate")
2675 " accesslog errorlog webdir_conf certificate")
2672 for o in optlist.split():
2676 for o in optlist.split():
2673 if opts[o]:
2677 if opts[o]:
2674 baseui.setconfig("web", o, str(opts[o]))
2678 baseui.setconfig("web", o, str(opts[o]))
2675 if (repo is not None) and (repo.ui != baseui):
2679 if (repo is not None) and (repo.ui != baseui):
2676 repo.ui.setconfig("web", o, str(opts[o]))
2680 repo.ui.setconfig("web", o, str(opts[o]))
2677
2681
2678 if repo is None and not ui.config("web", "webdir_conf"):
2682 if repo is None and not ui.config("web", "webdir_conf"):
2679 raise error.RepoError(_("There is no Mercurial repository here"
2683 raise error.RepoError(_("There is no Mercurial repository here"
2680 " (.hg not found)"))
2684 " (.hg not found)"))
2681
2685
2682 class service:
2686 class service:
2683 def init(self):
2687 def init(self):
2684 util.set_signal_handler()
2688 util.set_signal_handler()
2685 self.httpd = server.create_server(baseui, repo)
2689 self.httpd = server.create_server(baseui, repo)
2686
2690
2687 if not ui.verbose: return
2691 if not ui.verbose: return
2688
2692
2689 if self.httpd.prefix:
2693 if self.httpd.prefix:
2690 prefix = self.httpd.prefix.strip('/') + '/'
2694 prefix = self.httpd.prefix.strip('/') + '/'
2691 else:
2695 else:
2692 prefix = ''
2696 prefix = ''
2693
2697
2694 port = ':%d' % self.httpd.port
2698 port = ':%d' % self.httpd.port
2695 if port == ':80':
2699 if port == ':80':
2696 port = ''
2700 port = ''
2697
2701
2698 bindaddr = self.httpd.addr
2702 bindaddr = self.httpd.addr
2699 if bindaddr == '0.0.0.0':
2703 if bindaddr == '0.0.0.0':
2700 bindaddr = '*'
2704 bindaddr = '*'
2701 elif ':' in bindaddr: # IPv6
2705 elif ':' in bindaddr: # IPv6
2702 bindaddr = '[%s]' % bindaddr
2706 bindaddr = '[%s]' % bindaddr
2703
2707
2704 fqaddr = self.httpd.fqaddr
2708 fqaddr = self.httpd.fqaddr
2705 if ':' in fqaddr:
2709 if ':' in fqaddr:
2706 fqaddr = '[%s]' % fqaddr
2710 fqaddr = '[%s]' % fqaddr
2707 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2711 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2708 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2712 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2709
2713
2710 def run(self):
2714 def run(self):
2711 self.httpd.serve_forever()
2715 self.httpd.serve_forever()
2712
2716
2713 service = service()
2717 service = service()
2714
2718
2715 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2719 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2716
2720
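# The verbose banner printed by serve() normalizes the bound address before
# display: port 80 is dropped, '0.0.0.0' is shown as '*', and IPv6 literals
# are bracketed. A hedged standalone sketch of just that formatting step
# (the parameter names mirror the httpd attributes used above):

def listen_banner(fqaddr, port, prefix='', bindaddr='0.0.0.0'):
    prefix = prefix.strip('/') + '/' if prefix else ''
    portstr = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'
    elif ':' in bindaddr:                   # IPv6 literal
        bindaddr = '[%s]' % bindaddr
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    return 'listening at http://%s%s/%s (bound to %s:%d)' % (
        fqaddr, portstr, prefix, bindaddr, port)

# listen_banner('example.com', 8000) ->
# 'listening at http://example.com:8000/ (bound to *:8000)'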
2717 def status(ui, repo, *pats, **opts):
2721 def status(ui, repo, *pats, **opts):
2718 """show changed files in the working directory
2722 """show changed files in the working directory
2719
2723
2720 Show status of files in the repository. If names are given, only
2724 Show status of files in the repository. If names are given, only
2725 files that match are shown. Files that are clean or ignored or the
2729 files that match are shown. Files that are clean or ignored or the
2726 source of a copy/move operation are not listed unless -c/--clean,
2730 source of a copy/move operation are not listed unless -c/--clean,
2723 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2727 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2724 described with "show only ..." are given, the options -mardu are
2728 described with "show only ..." are given, the options -mardu are
2725 used.
2729 used.
2726
2730
2727 Option -q/--quiet hides untracked (unknown and ignored) files
2731 Option -q/--quiet hides untracked (unknown and ignored) files
2728 unless explicitly requested with -u/--unknown or -i/--ignored.
2732 unless explicitly requested with -u/--unknown or -i/--ignored.
2729
2733
2730 NOTE: status may appear to disagree with diff if permissions have
2734 NOTE: status may appear to disagree with diff if permissions have
2731 changed or a merge has occurred. The standard diff format does not
2735 changed or a merge has occurred. The standard diff format does not
2732 report permission changes and diff only reports changes relative
2736 report permission changes and diff only reports changes relative
2733 to one merge parent.
2737 to one merge parent.
2734
2738
2735 If one revision is given, it is used as the base revision.
2739 If one revision is given, it is used as the base revision.
2736 If two revisions are given, the difference between them is shown.
2740 If two revisions are given, the difference between them is shown.
2737
2741
2738 The codes used to show the status of files are:
2742 The codes used to show the status of files are:
2739 M = modified
2743 M = modified
2740 A = added
2744 A = added
2741 R = removed
2745 R = removed
2742 C = clean
2746 C = clean
2743 ! = missing (deleted by non-hg command, but still tracked)
2747 ! = missing (deleted by non-hg command, but still tracked)
2744 ? = not tracked
2748 ? = not tracked
2745 I = ignored
2749 I = ignored
2746 = the previously added file was copied from here
2750 = the previously added file was copied from here
2747 """
2751 """
2748
2752
2749 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2753 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2750 cwd = (pats and repo.getcwd()) or ''
2754 cwd = (pats and repo.getcwd()) or ''
2751 end = opts.get('print0') and '\0' or '\n'
2755 end = opts.get('print0') and '\0' or '\n'
2752 copy = {}
2756 copy = {}
2753 states = 'modified added removed deleted unknown ignored clean'.split()
2757 states = 'modified added removed deleted unknown ignored clean'.split()
2754 show = [k for k in states if opts.get(k)]
2758 show = [k for k in states if opts.get(k)]
2755 if opts.get('all'):
2759 if opts.get('all'):
2756 show += ui.quiet and (states[:4] + ['clean']) or states
2760 show += ui.quiet and (states[:4] + ['clean']) or states
2757 if not show:
2761 if not show:
2758 show = ui.quiet and states[:4] or states[:5]
2762 show = ui.quiet and states[:4] or states[:5]
2759
2763
2760 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2764 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2761 'ignored' in show, 'clean' in show, 'unknown' in show)
2765 'ignored' in show, 'clean' in show, 'unknown' in show)
2762 changestates = zip(states, 'MAR!?IC', stat)
2766 changestates = zip(states, 'MAR!?IC', stat)
2763
2767
2764 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2768 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2765 ctxn = repo[nullid]
2769 ctxn = repo[nullid]
2766 ctx1 = repo[node1]
2770 ctx1 = repo[node1]
2767 ctx2 = repo[node2]
2771 ctx2 = repo[node2]
2768 added = stat[1]
2772 added = stat[1]
2769 if node2 is None:
2773 if node2 is None:
2770 added = stat[0] + stat[1] # merged?
2774 added = stat[0] + stat[1] # merged?
2771
2775
2772 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2776 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2773 if k in added:
2777 if k in added:
2774 copy[k] = v
2778 copy[k] = v
2775 elif v in added:
2779 elif v in added:
2776 copy[v] = k
2780 copy[v] = k
2777
2781
2778 for state, char, files in changestates:
2782 for state, char, files in changestates:
2779 if state in show:
2783 if state in show:
2780 format = "%s %%s%s" % (char, end)
2784 format = "%s %%s%s" % (char, end)
2781 if opts.get('no_status'):
2785 if opts.get('no_status'):
2782 format = "%%s%s" % end
2786 format = "%%s%s" % end
2783
2787
2784 for f in files:
2788 for f in files:
2785 ui.write(format % repo.pathto(f, cwd))
2789 ui.write(format % repo.pathto(f, cwd))
2786 if f in copy:
2790 if f in copy:
2787 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2791 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2788
2792
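# status() pairs each state bucket with its one-letter code through
# zip(states, 'MAR!?IC', stat) and prints "<code> <path>" lines, switching
# the terminator to NUL for --print0 and dropping the code for --no-status.
# A hedged standalone sketch of that output loop, with plain lists standing
# in for the repo.status() buckets:

def format_status(buckets, show, print0=False, no_status=False):
    states = 'modified added removed deleted unknown ignored clean'.split()
    end = '\0' if print0 else '\n'
    out = []
    for state, char, files in zip(states, 'MAR!?IC', buckets):
        if state not in show:
            continue
        fmt = '%%s%s' % end if no_status else '%s %%s%s' % (char, end)
        for f in files:
            out.append(fmt % f)
    return ''.join(out)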
2789 def tag(ui, repo, name1, *names, **opts):
2793 def tag(ui, repo, name1, *names, **opts):
2790 """add one or more tags for the current or given revision
2794 """add one or more tags for the current or given revision
2791
2795
2792 Name a particular revision using <name>.
2796 Name a particular revision using <name>.
2793
2797
2794 Tags are used to name particular revisions of the repository and are
2798 Tags are used to name particular revisions of the repository and are
2795 very useful to compare different revisions, to go back to significant
2799 very useful to compare different revisions, to go back to significant
2796 earlier versions or to mark branch points as releases, etc.
2800 earlier versions or to mark branch points as releases, etc.
2797
2801
2798 If no revision is given, the parent of the working directory is
2802 If no revision is given, the parent of the working directory is
2799 used, or tip if no revision is checked out.
2803 used, or tip if no revision is checked out.
2800
2804
2801 To facilitate version control, distribution, and merging of tags,
2805 To facilitate version control, distribution, and merging of tags,
2802 they are stored as a file named ".hgtags" which is managed
2806 they are stored as a file named ".hgtags" which is managed
2803 similarly to other project files and can be hand-edited if
2807 similarly to other project files and can be hand-edited if
2804 necessary. The file '.hg/localtags' is used for local tags (not
2808 necessary. The file '.hg/localtags' is used for local tags (not
2805 shared among repositories).
2809 shared among repositories).
2806
2810
2807 See 'hg help dates' for a list of formats valid for -d/--date.
2811 See 'hg help dates' for a list of formats valid for -d/--date.
2808 """
2812 """
2809
2813
2810 rev_ = "."
2814 rev_ = "."
2811 names = (name1,) + names
2815 names = (name1,) + names
2812 if len(names) != len(set(names)):
2816 if len(names) != len(set(names)):
2813 raise util.Abort(_('tag names must be unique'))
2817 raise util.Abort(_('tag names must be unique'))
2814 for n in names:
2818 for n in names:
2815 if n in ['tip', '.', 'null']:
2819 if n in ['tip', '.', 'null']:
2816 raise util.Abort(_('the name \'%s\' is reserved') % n)
2820 raise util.Abort(_('the name \'%s\' is reserved') % n)
2817 if opts.get('rev') and opts.get('remove'):
2821 if opts.get('rev') and opts.get('remove'):
2818 raise util.Abort(_("--rev and --remove are incompatible"))
2822 raise util.Abort(_("--rev and --remove are incompatible"))
2819 if opts.get('rev'):
2823 if opts.get('rev'):
2820 rev_ = opts['rev']
2824 rev_ = opts['rev']
2821 message = opts.get('message')
2825 message = opts.get('message')
2822 if opts.get('remove'):
2826 if opts.get('remove'):
2823 expectedtype = opts.get('local') and 'local' or 'global'
2827 expectedtype = opts.get('local') and 'local' or 'global'
2824 for n in names:
2828 for n in names:
2825 if not repo.tagtype(n):
2829 if not repo.tagtype(n):
2826 raise util.Abort(_('tag \'%s\' does not exist') % n)
2830 raise util.Abort(_('tag \'%s\' does not exist') % n)
2827 if repo.tagtype(n) != expectedtype:
2831 if repo.tagtype(n) != expectedtype:
2828 if expectedtype == 'global':
2832 if expectedtype == 'global':
2829 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2833 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2830 else:
2834 else:
2831 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2835 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2832 rev_ = nullid
2836 rev_ = nullid
2833 if not message:
2837 if not message:
2834 message = _('Removed tag %s') % ', '.join(names)
2838 message = _('Removed tag %s') % ', '.join(names)
2835 elif not opts.get('force'):
2839 elif not opts.get('force'):
2836 for n in names:
2840 for n in names:
2837 if n in repo.tags():
2841 if n in repo.tags():
2838 raise util.Abort(_('tag \'%s\' already exists '
2842 raise util.Abort(_('tag \'%s\' already exists '
2839 '(use -f to force)') % n)
2843 '(use -f to force)') % n)
2840 if not rev_ and repo.dirstate.parents()[1] != nullid:
2844 if not rev_ and repo.dirstate.parents()[1] != nullid:
2841 raise util.Abort(_('uncommitted merge - please provide a '
2845 raise util.Abort(_('uncommitted merge - please provide a '
2842 'specific revision'))
2846 'specific revision'))
2843 r = repo[rev_].node()
2847 r = repo[rev_].node()
2844
2848
2845 if not message:
2849 if not message:
2846 message = (_('Added tag %s for changeset %s') %
2850 message = (_('Added tag %s for changeset %s') %
2847 (', '.join(names), short(r)))
2851 (', '.join(names), short(r)))
2848
2852
2849 date = opts.get('date')
2853 date = opts.get('date')
2850 if date:
2854 if date:
2851 date = util.parsedate(date)
2855 date = util.parsedate(date)
2852
2856
2853 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2857 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2854
2858
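# Before touching .hgtags, tag() validates its arguments up front: names must
# be unique, must not shadow the reserved names 'tip', '.' and 'null', and
# --rev cannot be combined with --remove. A hedged standalone sketch of that
# validation, with ValueError standing in for util.Abort:

def check_tag_args(names, rev=None, remove=False):
    if len(names) != len(set(names)):
        raise ValueError('tag names must be unique')
    for n in names:
        if n in ('tip', '.', 'null'):
            raise ValueError("the name '%s' is reserved" % n)
    if rev and remove:
        raise ValueError('--rev and --remove are incompatible')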
2855 def tags(ui, repo):
2859 def tags(ui, repo):
2856 """list repository tags
2860 """list repository tags
2857
2861
2858 This lists both regular and local tags. When the -v/--verbose
2862 This lists both regular and local tags. When the -v/--verbose
2859 switch is used, a third column "local" is printed for local tags.
2863 switch is used, a third column "local" is printed for local tags.
2860 """
2864 """
2861
2865
2862 hexfunc = ui.debugflag and hex or short
2866 hexfunc = ui.debugflag and hex or short
2863 tagtype = ""
2867 tagtype = ""
2864
2868
2865 for t, n in reversed(repo.tagslist()):
2869 for t, n in reversed(repo.tagslist()):
2866 if ui.quiet:
2870 if ui.quiet:
2867 ui.write("%s\n" % t)
2871 ui.write("%s\n" % t)
2868 continue
2872 continue
2869
2873
2870 try:
2874 try:
2871 hn = hexfunc(n)
2875 hn = hexfunc(n)
2872 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2876 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2873 except error.LookupError:
2877 except error.LookupError:
2874 r = " ?:%s" % hn
2878 r = " ?:%s" % hn
2875 else:
2879 else:
2876 spaces = " " * (30 - encoding.colwidth(t))
2880 spaces = " " * (30 - encoding.colwidth(t))
2877 if ui.verbose:
2881 if ui.verbose:
2878 if repo.tagtype(t) == 'local':
2882 if repo.tagtype(t) == 'local':
2879 tagtype = " local"
2883 tagtype = " local"
2880 else:
2884 else:
2881 tagtype = ""
2885 tagtype = ""
2882 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2886 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2883
2887
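# tags() pads each name to a 30-column field with encoding.colwidth(), so
# double-width (e.g. East Asian) characters still line up. A hedged
# standalone sketch of the same padding; unicodedata is a rough stand-in for
# encoding.colwidth() and expects a unicode string:

import unicodedata

def colwidth(text):
    return sum(2 if unicodedata.east_asian_width(c) in 'WF' else 1
               for c in text)

def tag_line(name, rev, hexnode, local=False):
    spaces = ' ' * max(1, 30 - colwidth(name))
    return '%s%s %5d:%s%s' % (name, spaces, rev, hexnode,
                              ' local' if local else '')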
2884 def tip(ui, repo, **opts):
2888 def tip(ui, repo, **opts):
2885 """show the tip revision
2889 """show the tip revision
2886
2890
2887 The tip revision (usually just called the tip) is the most
2891 The tip revision (usually just called the tip) is the most
2888 recently added changeset in the repository, the most recently
2892 recently added changeset in the repository, the most recently
2889 changed head.
2893 changed head.
2890
2894
2891 If you have just made a commit, that commit will be the tip. If
2895 If you have just made a commit, that commit will be the tip. If
2892 you have just pulled changes from another repository, the tip of
2896 you have just pulled changes from another repository, the tip of
2893 that repository becomes the current tip. The "tip" tag is special
2897 that repository becomes the current tip. The "tip" tag is special
2894 and cannot be renamed or assigned to a different changeset.
2898 and cannot be renamed or assigned to a different changeset.
2895 """
2899 """
2896 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2900 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2897
2901
2898 def unbundle(ui, repo, fname1, *fnames, **opts):
2902 def unbundle(ui, repo, fname1, *fnames, **opts):
2899 """apply one or more changegroup files
2903 """apply one or more changegroup files
2900
2904
2901 Apply one or more compressed changegroup files generated by the
2905 Apply one or more compressed changegroup files generated by the
2902 bundle command.
2906 bundle command.
2903 """
2907 """
2904 fnames = (fname1,) + fnames
2908 fnames = (fname1,) + fnames
2905
2909
2906 lock = repo.lock()
2910 lock = repo.lock()
2907 try:
2911 try:
2908 for fname in fnames:
2912 for fname in fnames:
2909 f = url.open(ui, fname)
2913 f = url.open(ui, fname)
2910 gen = changegroup.readbundle(f, fname)
2914 gen = changegroup.readbundle(f, fname)
2911 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2915 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2912 finally:
2916 finally:
2913 lock.release()
2917 lock.release()
2914
2918
2915 return postincoming(ui, repo, modheads, opts.get('update'), None)
2919 return postincoming(ui, repo, modheads, opts.get('update'), None)
2916
2920
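# unbundle() takes the repository lock once, applies every bundle inside a
# single try block, and releases the lock in a finally clause so a failure
# on one file cannot leave the repository locked. A hedged standalone sketch
# of that acquire / apply-all / release shape, using threading.Lock:

import threading

def apply_all(lock, files, apply_one):
    lock.acquire()
    try:
        return [apply_one(f) for f in files]
    finally:
        lock.release()

# usage sketch (hypothetical names): apply_all(threading.Lock(), bundle_paths, apply_one_bundle)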
2917 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2921 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2918 """update working directory
2922 """update working directory
2919
2923
2920 Update the repository's working directory to the specified
2924 Update the repository's working directory to the specified
2921 revision, or the tip of the current branch if none is specified.
2925 revision, or the tip of the current branch if none is specified.
2922 Use null as the revision to remove the working copy (like 'hg
2926 Use null as the revision to remove the working copy (like 'hg
2923 clone -U').
2927 clone -U').
2924
2928
2925 When the working directory contains no uncommitted changes, it
2929 When the working directory contains no uncommitted changes, it
2926 will be replaced by the state of the requested revision from the
2930 will be replaced by the state of the requested revision from the
2927 repository. When the requested revision is on a different branch,
2931 repository. When the requested revision is on a different branch,
2928 the working directory will additionally be switched to that
2932 the working directory will additionally be switched to that
2929 branch.
2933 branch.
2930
2934
2931 When there are uncommitted changes, use option -C/--clean to
2935 When there are uncommitted changes, use option -C/--clean to
2932 discard them, forcibly replacing the state of the working
2936 discard them, forcibly replacing the state of the working
2933 directory with the requested revision.
2937 directory with the requested revision.
2934
2938
2935 When there are uncommitted changes and option -C/--clean is not
2939 When there are uncommitted changes and option -C/--clean is not
2936 used, and the parent revision and requested revision are on the
2940 used, and the parent revision and requested revision are on the
2937 same branch, and one of them is an ancestor of the other, then the
2941 same branch, and one of them is an ancestor of the other, then the
2938 new working directory will contain the requested revision merged
2942 new working directory will contain the requested revision merged
2939 with the uncommitted changes. Otherwise, the update will fail with
2943 with the uncommitted changes. Otherwise, the update will fail with
2940 a suggestion to use 'merge' or 'update -C' instead.
2944 a suggestion to use 'merge' or 'update -C' instead.
2941
2945
2942 If you want to update just one file to an older revision, use
2946 If you want to update just one file to an older revision, use
2943 revert.
2947 revert.
2944
2948
2945 See 'hg help dates' for a list of formats valid for -d/--date.
2949 See 'hg help dates' for a list of formats valid for -d/--date.
2946 """
2950 """
2947 if rev and node:
2951 if rev and node:
2948 raise util.Abort(_("please specify just one revision"))
2952 raise util.Abort(_("please specify just one revision"))
2949
2953
2950 if not rev:
2954 if not rev:
2951 rev = node
2955 rev = node
2952
2956
2953 if date:
2957 if date:
2954 if rev:
2958 if rev:
2955 raise util.Abort(_("you can't specify a revision and a date"))
2959 raise util.Abort(_("you can't specify a revision and a date"))
2956 rev = cmdutil.finddate(ui, repo, date)
2960 rev = cmdutil.finddate(ui, repo, date)
2957
2961
2958 if clean:
2962 if clean:
2959 return hg.clean(repo, rev)
2963 return hg.clean(repo, rev)
2960 else:
2964 else:
2961 return hg.update(repo, rev)
2965 return hg.update(repo, rev)
2962
2966
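# The update help above amounts to a small decision procedure: a clean
# update always wins, a dirty working directory may still be updated when
# the two revisions are on the same branch and linearly related, and
# anything else is refused with a hint. A hedged sketch of that decision as
# a pure function (the booleans are assumptions supplied by the caller):

def update_action(dirty, clean_requested, same_branch, linear):
    if clean_requested:
        return 'discard local changes and update'
    if not dirty:
        return 'update'
    if same_branch and linear:
        return 'merge uncommitted changes into the requested revision'
    return "abort: use 'merge' or 'update -C' instead"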
2963 def verify(ui, repo):
2967 def verify(ui, repo):
2964 """verify the integrity of the repository
2968 """verify the integrity of the repository
2965
2969
2966 Verify the integrity of the current repository.
2970 Verify the integrity of the current repository.
2967
2971
2968 This will perform an extensive check of the repository's
2972 This will perform an extensive check of the repository's
2969 integrity, validating the hashes and checksums of each entry in
2973 integrity, validating the hashes and checksums of each entry in
2970 the changelog, manifest, and tracked files, as well as the
2974 the changelog, manifest, and tracked files, as well as the
2971 integrity of their crosslinks and indices.
2975 integrity of their crosslinks and indices.
2972 """
2976 """
2973 return hg.verify(repo)
2977 return hg.verify(repo)
2974
2978
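# hg.verify() recomputes the hash of every revlog entry and checks the cross
# links between changelog, manifest and filelogs. A hedged toy sketch of the
# core "recompute and compare" step with hashlib (real revlog hashing also
# mixes the parent nodes into the digest):

import hashlib

def check_entries(entries):
    # entries: iterable of (name, data, recorded_hex_digest) tuples
    return [name for name, data, recorded in entries
            if hashlib.sha1(data).hexdigest() != recorded]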
2975 def version_(ui):
2979 def version_(ui):
2976 """output version and copyright information"""
2980 """output version and copyright information"""
2977 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2981 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2978 % util.version())
2982 % util.version())
2979 ui.status(_(
2983 ui.status(_(
2980 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2984 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2981 "This is free software; see the source for copying conditions. "
2985 "This is free software; see the source for copying conditions. "
2982 "There is NO\nwarranty; "
2986 "There is NO\nwarranty; "
2983 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2987 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2984 ))
2988 ))
2985
2989
2986 # Command options and aliases are listed here, alphabetically
2990 # Command options and aliases are listed here, alphabetically
2987
2991
2988 globalopts = [
2992 globalopts = [
2989 ('R', 'repository', '',
2993 ('R', 'repository', '',
2990 _('repository root directory or symbolic path name')),
2994 _('repository root directory or symbolic path name')),
2991 ('', 'cwd', '', _('change working directory')),
2995 ('', 'cwd', '', _('change working directory')),
2992 ('y', 'noninteractive', None,
2996 ('y', 'noninteractive', None,
2993 _('do not prompt, assume \'yes\' for any required answers')),
2997 _('do not prompt, assume \'yes\' for any required answers')),
2994 ('q', 'quiet', None, _('suppress output')),
2998 ('q', 'quiet', None, _('suppress output')),
2995 ('v', 'verbose', None, _('enable additional output')),
2999 ('v', 'verbose', None, _('enable additional output')),
2996 ('', 'config', [], _('set/override config option')),
3000 ('', 'config', [], _('set/override config option')),
2997 ('', 'debug', None, _('enable debugging output')),
3001 ('', 'debug', None, _('enable debugging output')),
2998 ('', 'debugger', None, _('start debugger')),
3002 ('', 'debugger', None, _('start debugger')),
2999 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3003 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3000 ('', 'encodingmode', encoding.encodingmode,
3004 ('', 'encodingmode', encoding.encodingmode,
3001 _('set the charset encoding mode')),
3005 _('set the charset encoding mode')),
3002 ('', 'traceback', None, _('print traceback on exception')),
3006 ('', 'traceback', None, _('print traceback on exception')),
3003 ('', 'time', None, _('time how long the command takes')),
3007 ('', 'time', None, _('time how long the command takes')),
3004 ('', 'profile', None, _('print command execution profile')),
3008 ('', 'profile', None, _('print command execution profile')),
3005 ('', 'version', None, _('output version information and exit')),
3009 ('', 'version', None, _('output version information and exit')),
3006 ('h', 'help', None, _('display help and exit')),
3010 ('h', 'help', None, _('display help and exit')),
3007 ]
3011 ]
3008
3012
3009 dryrunopts = [('n', 'dry-run', None,
3013 dryrunopts = [('n', 'dry-run', None,
3010 _('do not perform actions, just print output'))]
3014 _('do not perform actions, just print output'))]
3011
3015
3012 remoteopts = [
3016 remoteopts = [
3013 ('e', 'ssh', '', _('specify ssh command to use')),
3017 ('e', 'ssh', '', _('specify ssh command to use')),
3014 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3018 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3015 ]
3019 ]
3016
3020
3017 walkopts = [
3021 walkopts = [
3018 ('I', 'include', [], _('include names matching the given patterns')),
3022 ('I', 'include', [], _('include names matching the given patterns')),
3019 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3023 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3020 ]
3024 ]
3021
3025
3022 commitopts = [
3026 commitopts = [
3023 ('m', 'message', '', _('use <text> as commit message')),
3027 ('m', 'message', '', _('use <text> as commit message')),
3024 ('l', 'logfile', '', _('read commit message from <file>')),
3028 ('l', 'logfile', '', _('read commit message from <file>')),
3025 ]
3029 ]
3026
3030
3027 commitopts2 = [
3031 commitopts2 = [
3028 ('d', 'date', '', _('record datecode as commit date')),
3032 ('d', 'date', '', _('record datecode as commit date')),
3029 ('u', 'user', '', _('record the specified user as committer')),
3033 ('u', 'user', '', _('record the specified user as committer')),
3030 ]
3034 ]
3031
3035
3032 templateopts = [
3036 templateopts = [
3033 ('', 'style', '', _('display using template map file')),
3037 ('', 'style', '', _('display using template map file')),
3034 ('', 'template', '', _('display with template')),
3038 ('', 'template', '', _('display with template')),
3035 ]
3039 ]
3036
3040
3037 logopts = [
3041 logopts = [
3038 ('p', 'patch', None, _('show patch')),
3042 ('p', 'patch', None, _('show patch')),
3039 ('g', 'git', None, _('use git extended diff format')),
3043 ('g', 'git', None, _('use git extended diff format')),
3040 ('l', 'limit', '', _('limit number of changes displayed')),
3044 ('l', 'limit', '', _('limit number of changes displayed')),
3041 ('M', 'no-merges', None, _('do not show merges')),
3045 ('M', 'no-merges', None, _('do not show merges')),
3042 ] + templateopts
3046 ] + templateopts
3043
3047
3044 diffopts = [
3048 diffopts = [
3045 ('a', 'text', None, _('treat all files as text')),
3049 ('a', 'text', None, _('treat all files as text')),
3046 ('g', 'git', None, _('use git extended diff format')),
3050 ('g', 'git', None, _('use git extended diff format')),
3047 ('', 'nodates', None, _("don't include dates in diff headers"))
3051 ('', 'nodates', None, _("don't include dates in diff headers"))
3048 ]
3052 ]
3049
3053
3050 diffopts2 = [
3054 diffopts2 = [
3051 ('p', 'show-function', None, _('show which function each change is in')),
3055 ('p', 'show-function', None, _('show which function each change is in')),
3052 ('w', 'ignore-all-space', None,
3056 ('w', 'ignore-all-space', None,
3053 _('ignore white space when comparing lines')),
3057 _('ignore white space when comparing lines')),
3054 ('b', 'ignore-space-change', None,
3058 ('b', 'ignore-space-change', None,
3055 _('ignore changes in the amount of white space')),
3059 _('ignore changes in the amount of white space')),
3056 ('B', 'ignore-blank-lines', None,
3060 ('B', 'ignore-blank-lines', None,
3057 _('ignore changes whose lines are all blank')),
3061 _('ignore changes whose lines are all blank')),
3058 ('U', 'unified', '', _('number of lines of context to show'))
3062 ('U', 'unified', '', _('number of lines of context to show'))
3059 ]
3063 ]
3060
3064
3061 similarityopts = [
3065 similarityopts = [
3062 ('s', 'similarity', '',
3066 ('s', 'similarity', '',
3063 _('guess renamed files by similarity (0<=s<=100)'))
3067 _('guess renamed files by similarity (0<=s<=100)'))
3064 ]
3068 ]
3065
3069
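# Every option above is a (short flag, long flag, default, help) tuple, and
# the command table below concatenates lists of them with '+'. Mercurial has
# its own parser for these tuples, but the structure maps naturally onto the
# standard library; a hedged optparse sketch for illustration only, not how
# dispatch actually consumes them:

import optparse

def build_parser(opttable):
    parser = optparse.OptionParser()
    for short, long_, default, help_ in opttable:
        flags = ['--' + long_]
        if short:
            flags.insert(0, '-' + short)
        kwargs = {'help': help_}
        if default is None or default is False:
            kwargs['action'] = 'store_true'    # plain boolean switch
        elif isinstance(default, list):
            kwargs['action'] = 'append'        # repeatable option
            kwargs['default'] = []
        else:
            kwargs['default'] = default        # string or numeric value
        parser.add_option(*flags, **kwargs)
    return parser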
3066 table = {
3070 table = {
3067 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3071 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3068 "addremove":
3072 "addremove":
3069 (addremove, similarityopts + walkopts + dryrunopts,
3073 (addremove, similarityopts + walkopts + dryrunopts,
3070 _('[OPTION]... [FILE]...')),
3074 _('[OPTION]... [FILE]...')),
3071 "^annotate|blame":
3075 "^annotate|blame":
3072 (annotate,
3076 (annotate,
3073 [('r', 'rev', '', _('annotate the specified revision')),
3077 [('r', 'rev', '', _('annotate the specified revision')),
3074 ('f', 'follow', None, _('follow file copies and renames')),
3078 ('f', 'follow', None, _('follow file copies and renames')),
3075 ('a', 'text', None, _('treat all files as text')),
3079 ('a', 'text', None, _('treat all files as text')),
3076 ('u', 'user', None, _('list the author (long with -v)')),
3080 ('u', 'user', None, _('list the author (long with -v)')),
3077 ('d', 'date', None, _('list the date (short with -q)')),
3081 ('d', 'date', None, _('list the date (short with -q)')),
3078 ('n', 'number', None, _('list the revision number (default)')),
3082 ('n', 'number', None, _('list the revision number (default)')),
3079 ('c', 'changeset', None, _('list the changeset')),
3083 ('c', 'changeset', None, _('list the changeset')),
3080 ('l', 'line-number', None,
3084 ('l', 'line-number', None,
3081 _('show line number at the first appearance'))
3085 _('show line number at the first appearance'))
3082 ] + walkopts,
3086 ] + walkopts,
3083 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3087 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3084 "archive":
3088 "archive":
3085 (archive,
3089 (archive,
3086 [('', 'no-decode', None, _('do not pass files through decoders')),
3090 [('', 'no-decode', None, _('do not pass files through decoders')),
3087 ('p', 'prefix', '', _('directory prefix for files in archive')),
3091 ('p', 'prefix', '', _('directory prefix for files in archive')),
3088 ('r', 'rev', '', _('revision to distribute')),
3092 ('r', 'rev', '', _('revision to distribute')),
3089 ('t', 'type', '', _('type of distribution to create')),
3093 ('t', 'type', '', _('type of distribution to create')),
3090 ] + walkopts,
3094 ] + walkopts,
3091 _('[OPTION]... DEST')),
3095 _('[OPTION]... DEST')),
3092 "backout":
3096 "backout":
3093 (backout,
3097 (backout,
3094 [('', 'merge', None,
3098 [('', 'merge', None,
3095 _('merge with old dirstate parent after backout')),
3099 _('merge with old dirstate parent after backout')),
3096 ('', 'parent', '', _('parent to choose when backing out merge')),
3100 ('', 'parent', '', _('parent to choose when backing out merge')),
3097 ('r', 'rev', '', _('revision to backout')),
3101 ('r', 'rev', '', _('revision to backout')),
3098 ] + walkopts + commitopts + commitopts2,
3102 ] + walkopts + commitopts + commitopts2,
3099 _('[OPTION]... [-r] REV')),
3103 _('[OPTION]... [-r] REV')),
3100 "bisect":
3104 "bisect":
3101 (bisect,
3105 (bisect,
3102 [('r', 'reset', False, _('reset bisect state')),
3106 [('r', 'reset', False, _('reset bisect state')),
3103 ('g', 'good', False, _('mark changeset good')),
3107 ('g', 'good', False, _('mark changeset good')),
3104 ('b', 'bad', False, _('mark changeset bad')),
3108 ('b', 'bad', False, _('mark changeset bad')),
3105 ('s', 'skip', False, _('skip testing changeset')),
3109 ('s', 'skip', False, _('skip testing changeset')),
3106 ('c', 'command', '', _('use command to check changeset state')),
3110 ('c', 'command', '', _('use command to check changeset state')),
3107 ('U', 'noupdate', False, _('do not update to target'))],
3111 ('U', 'noupdate', False, _('do not update to target'))],
3108 _("[-gbsr] [-c CMD] [REV]")),
3112 _("[-gbsr] [-c CMD] [REV]")),
3109 "branch":
3113 "branch":
3110 (branch,
3114 (branch,
3111 [('f', 'force', None,
3115 [('f', 'force', None,
3112 _('set branch name even if it shadows an existing branch')),
3116 _('set branch name even if it shadows an existing branch')),
3113 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3117 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3114 _('[-fC] [NAME]')),
3118 _('[-fC] [NAME]')),
3115 "branches":
3119 "branches":
3116 (branches,
3120 (branches,
3117 [('a', 'active', False,
3121 [('a', 'active', False,
3118 _('show only branches that have unmerged heads'))],
3122 _('show only branches that have unmerged heads'))],
3119 _('[-a]')),
3123 _('[-a]')),
3120 "bundle":
3124 "bundle":
3121 (bundle,
3125 (bundle,
3122 [('f', 'force', None,
3126 [('f', 'force', None,
3123 _('run even when remote repository is unrelated')),
3127 _('run even when remote repository is unrelated')),
3124 ('r', 'rev', [],
3128 ('r', 'rev', [],
3125 _('a changeset up to which you would like to bundle')),
3129 _('a changeset up to which you would like to bundle')),
3126 ('', 'base', [],
3130 ('', 'base', [],
3127 _('a base changeset to specify instead of a destination')),
3131 _('a base changeset to specify instead of a destination')),
3128 ('a', 'all', None, _('bundle all changesets in the repository')),
3132 ('a', 'all', None, _('bundle all changesets in the repository')),
3129 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3133 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3130 ] + remoteopts,
3134 ] + remoteopts,
3131 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3135 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3132 "cat":
3136 "cat":
3133 (cat,
3137 (cat,
3134 [('o', 'output', '', _('print output to file with formatted name')),
3138 [('o', 'output', '', _('print output to file with formatted name')),
3135 ('r', 'rev', '', _('print the given revision')),
3139 ('r', 'rev', '', _('print the given revision')),
3136 ('', 'decode', None, _('apply any matching decode filter')),
3140 ('', 'decode', None, _('apply any matching decode filter')),
3137 ] + walkopts,
3141 ] + walkopts,
3138 _('[OPTION]... FILE...')),
3142 _('[OPTION]... FILE...')),
3139 "^clone":
3143 "^clone":
3140 (clone,
3144 (clone,
3141 [('U', 'noupdate', None,
3145 [('U', 'noupdate', None,
3142 _('the clone will only contain a repository (no working copy)')),
3146 _('the clone will only contain a repository (no working copy)')),
3143 ('r', 'rev', [],
3147 ('r', 'rev', [],
3144 _('a changeset you would like to have after cloning')),
3148 _('a changeset you would like to have after cloning')),
3145 ('', 'pull', None, _('use pull protocol to copy metadata')),
3149 ('', 'pull', None, _('use pull protocol to copy metadata')),
3146 ('', 'uncompressed', None,
3150 ('', 'uncompressed', None,
3147 _('use uncompressed transfer (fast over LAN)')),
3151 _('use uncompressed transfer (fast over LAN)')),
3148 ] + remoteopts,
3152 ] + remoteopts,
3149 _('[OPTION]... SOURCE [DEST]')),
3153 _('[OPTION]... SOURCE [DEST]')),
3150 "^commit|ci":
3154 "^commit|ci":
3151 (commit,
3155 (commit,
3152 [('A', 'addremove', None,
3156 [('A', 'addremove', None,
3153 _('mark new/missing files as added/removed before committing')),
3157 _('mark new/missing files as added/removed before committing')),
3154 ('', 'close-branch', None,
3158 ('', 'close-branch', None,
3155 _('mark a branch as closed, hiding it from the branch list')),
3159 _('mark a branch as closed, hiding it from the branch list')),
3156 ] + walkopts + commitopts + commitopts2,
3160 ] + walkopts + commitopts + commitopts2,
3157 _('[OPTION]... [FILE]...')),
3161 _('[OPTION]... [FILE]...')),
3158 "copy|cp":
3162 "copy|cp":
3159 (copy,
3163 (copy,
3160 [('A', 'after', None, _('record a copy that has already occurred')),
3164 [('A', 'after', None, _('record a copy that has already occurred')),
3161 ('f', 'force', None,
3165 ('f', 'force', None,
3162 _('forcibly copy over an existing managed file')),
3166 _('forcibly copy over an existing managed file')),
3163 ] + walkopts + dryrunopts,
3167 ] + walkopts + dryrunopts,
3164 _('[OPTION]... [SOURCE]... DEST')),
3168 _('[OPTION]... [SOURCE]... DEST')),
3165 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3169 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3166 "debugcheckstate": (debugcheckstate, []),
3170 "debugcheckstate": (debugcheckstate, []),
3167 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3171 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3168 "debugcomplete":
3172 "debugcomplete":
3169 (debugcomplete,
3173 (debugcomplete,
3170 [('o', 'options', None, _('show the command options'))],
3174 [('o', 'options', None, _('show the command options'))],
3171 _('[-o] CMD')),
3175 _('[-o] CMD')),
3172 "debugdate":
3176 "debugdate":
3173 (debugdate,
3177 (debugdate,
3174 [('e', 'extended', None, _('try extended date formats'))],
3178 [('e', 'extended', None, _('try extended date formats'))],
3175 _('[-e] DATE [RANGE]')),
3179 _('[-e] DATE [RANGE]')),
3176 "debugdata": (debugdata, [], _('FILE REV')),
3180 "debugdata": (debugdata, [], _('FILE REV')),
3177 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3181 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3178 "debugindex": (debugindex, [], _('FILE')),
3182 "debugindex": (debugindex, [], _('FILE')),
3179 "debugindexdot": (debugindexdot, [], _('FILE')),
3183 "debugindexdot": (debugindexdot, [], _('FILE')),
3180 "debuginstall": (debuginstall, []),
3184 "debuginstall": (debuginstall, []),
3181 "debugrebuildstate":
3185 "debugrebuildstate":
3182 (debugrebuildstate,
3186 (debugrebuildstate,
3183 [('r', 'rev', '', _('revision to rebuild to'))],
3187 [('r', 'rev', '', _('revision to rebuild to'))],
3184 _('[-r REV] [REV]')),
3188 _('[-r REV] [REV]')),
3185 "debugrename":
3189 "debugrename":
3186 (debugrename,
3190 (debugrename,
3187 [('r', 'rev', '', _('revision to debug'))],
3191 [('r', 'rev', '', _('revision to debug'))],
3188 _('[-r REV] FILE')),
3192 _('[-r REV] FILE')),
3189 "debugsetparents":
3193 "debugsetparents":
3190 (debugsetparents, [], _('REV1 [REV2]')),
3194 (debugsetparents, [], _('REV1 [REV2]')),
3191 "debugstate":
3195 "debugstate":
3192 (debugstate,
3196 (debugstate,
3193 [('', 'nodates', None, _('do not display the saved mtime'))],
3197 [('', 'nodates', None, _('do not display the saved mtime'))],
3194 _('[OPTION]...')),
3198 _('[OPTION]...')),
3195 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3199 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3196 "^diff":
3200 "^diff":
3197 (diff,
3201 (diff,
3198 [('r', 'rev', [], _('revision')),
3202 [('r', 'rev', [], _('revision')),
3199 ('c', 'change', '', _('change made by revision'))
3203 ('c', 'change', '', _('change made by revision'))
3200 ] + diffopts + diffopts2 + walkopts,
3204 ] + diffopts + diffopts2 + walkopts,
3201 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3205 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3202 "^export":
3206 "^export":
3203 (export,
3207 (export,
3204 [('o', 'output', '', _('print output to file with formatted name')),
3208 [('o', 'output', '', _('print output to file with formatted name')),
3205 ('', 'switch-parent', None, _('diff against the second parent'))
3209 ('', 'switch-parent', None, _('diff against the second parent'))
3206 ] + diffopts,
3210 ] + diffopts,
3207 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3211 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3208 "grep":
3212 "grep":
3209 (grep,
3213 (grep,
3210 [('0', 'print0', None, _('end fields with NUL')),
3214 [('0', 'print0', None, _('end fields with NUL')),
3211 ('', 'all', None, _('print all revisions that match')),
3215 ('', 'all', None, _('print all revisions that match')),
3212 ('f', 'follow', None,
3216 ('f', 'follow', None,
3213 _('follow changeset history, or file history across copies and renames')),
3217 _('follow changeset history, or file history across copies and renames')),
3214 ('i', 'ignore-case', None, _('ignore case when matching')),
3218 ('i', 'ignore-case', None, _('ignore case when matching')),
3215 ('l', 'files-with-matches', None,
3219 ('l', 'files-with-matches', None,
3216 _('print only filenames and revisions that match')),
3220 _('print only filenames and revisions that match')),
3217 ('n', 'line-number', None, _('print matching line numbers')),
3221 ('n', 'line-number', None, _('print matching line numbers')),
3218 ('r', 'rev', [], _('search in given revision range')),
3222 ('r', 'rev', [], _('search in given revision range')),
3219 ('u', 'user', None, _('list the author (long with -v)')),
3223 ('u', 'user', None, _('list the author (long with -v)')),
3220 ('d', 'date', None, _('list the date (short with -q)')),
3224 ('d', 'date', None, _('list the date (short with -q)')),
3221 ] + walkopts,
3225 ] + walkopts,
3222 _('[OPTION]... PATTERN [FILE]...')),
3226 _('[OPTION]... PATTERN [FILE]...')),
3223 "heads":
3227 "heads":
3224 (heads,
3228 (heads,
3225 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3229 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3226 ('a', 'active', False,
3230 ('a', 'active', False,
3227 _('show only the active heads from open branches')),
3231 _('show only the active heads from open branches')),
3228 ] + templateopts,
3232 ] + templateopts,
3229 _('[-r REV] [REV]...')),
3233 _('[-r REV] [REV]...')),
3230 "help": (help_, [], _('[TOPIC]')),
3234 "help": (help_, [], _('[TOPIC]')),
3231 "identify|id":
3235 "identify|id":
3232 (identify,
3236 (identify,
3233 [('r', 'rev', '', _('identify the specified revision')),
3237 [('r', 'rev', '', _('identify the specified revision')),
3234 ('n', 'num', None, _('show local revision number')),
3238 ('n', 'num', None, _('show local revision number')),
3235 ('i', 'id', None, _('show global revision id')),
3239 ('i', 'id', None, _('show global revision id')),
3236 ('b', 'branch', None, _('show branch')),
3240 ('b', 'branch', None, _('show branch')),
3237 ('t', 'tags', None, _('show tags'))],
3241 ('t', 'tags', None, _('show tags'))],
3238 _('[-nibt] [-r REV] [SOURCE]')),
3242 _('[-nibt] [-r REV] [SOURCE]')),
3239 "import|patch":
3243 "import|patch":
3240 (import_,
3244 (import_,
3241 [('p', 'strip', 1,
3245 [('p', 'strip', 1,
3242 _('directory strip option for patch. This has the same '
3246 _('directory strip option for patch. This has the same '
3243 'meaning as the corresponding patch option')),
3247 'meaning as the corresponding patch option')),
3244 ('b', 'base', '', _('base path')),
3248 ('b', 'base', '', _('base path')),
3245 ('f', 'force', None,
3249 ('f', 'force', None,
3246 _('skip check for outstanding uncommitted changes')),
3250 _('skip check for outstanding uncommitted changes')),
3247 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3251 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3248 ('', 'exact', None,
3252 ('', 'exact', None,
3249 _('apply patch to the nodes from which it was generated')),
3253 _('apply patch to the nodes from which it was generated')),
3250 ('', 'import-branch', None,
3254 ('', 'import-branch', None,
3251 _('use any branch information in patch (implied by --exact)'))] +
3255 _('use any branch information in patch (implied by --exact)'))] +
3252 commitopts + commitopts2 + similarityopts,
3256 commitopts + commitopts2 + similarityopts,
3253 _('[OPTION]... PATCH...')),
3257 _('[OPTION]... PATCH...')),
3254 "incoming|in":
3258 "incoming|in":
3255 (incoming,
3259 (incoming,
3256 [('f', 'force', None,
3260 [('f', 'force', None,
3257 _('run even when remote repository is unrelated')),
3261 _('run even when remote repository is unrelated')),
3258 ('n', 'newest-first', None, _('show newest record first')),
3262 ('n', 'newest-first', None, _('show newest record first')),
3259 ('', 'bundle', '', _('file to store the bundles into')),
3263 ('', 'bundle', '', _('file to store the bundles into')),
3260 ('r', 'rev', [],
3264 ('r', 'rev', [],
3261 _('a specific revision up to which you would like to pull')),
3265 _('a specific revision up to which you would like to pull')),
3262 ] + logopts + remoteopts,
3266 ] + logopts + remoteopts,
3263 _('[-p] [-n] [-M] [-f] [-r REV]...'
3267 _('[-p] [-n] [-M] [-f] [-r REV]...'
3264 ' [--bundle FILENAME] [SOURCE]')),
3268 ' [--bundle FILENAME] [SOURCE]')),
3265 "^init":
3269 "^init":
3266 (init,
3270 (init,
3267 remoteopts,
3271 remoteopts,
3268 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3272 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3269 "locate":
3273 "locate":
3270 (locate,
3274 (locate,
3271 [('r', 'rev', '', _('search the repository as it stood at REV')),
3275 [('r', 'rev', '', _('search the repository as it stood at REV')),
3272 ('0', 'print0', None,
3276 ('0', 'print0', None,
3273 _('end filenames with NUL, for use with xargs')),
3277 _('end filenames with NUL, for use with xargs')),
3274 ('f', 'fullpath', None,
3278 ('f', 'fullpath', None,
3275 _('print complete paths from the filesystem root')),
3279 _('print complete paths from the filesystem root')),
3276 ] + walkopts,
3280 ] + walkopts,
3277 _('[OPTION]... [PATTERN]...')),
3281 _('[OPTION]... [PATTERN]...')),
3278 "^log|history":
3282 "^log|history":
3279 (log,
3283 (log,
3280 [('f', 'follow', None,
3284 [('f', 'follow', None,
3281 _('follow changeset history, or file history across copies and renames')),
3285 _('follow changeset history, or file history across copies and renames')),
3282 ('', 'follow-first', None,
3286 ('', 'follow-first', None,
3283 _('only follow the first parent of merge changesets')),
3287 _('only follow the first parent of merge changesets')),
3284 ('d', 'date', '', _('show revisions matching date spec')),
3288 ('d', 'date', '', _('show revisions matching date spec')),
3285 ('C', 'copies', None, _('show copied files')),
3289 ('C', 'copies', None, _('show copied files')),
3286 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3290 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3287 ('r', 'rev', [], _('show the specified revision or range')),
3291 ('r', 'rev', [], _('show the specified revision or range')),
3288 ('', 'removed', None, _('include revisions where files were removed')),
3292 ('', 'removed', None, _('include revisions where files were removed')),
3289 ('m', 'only-merges', None, _('show only merges')),
3293 ('m', 'only-merges', None, _('show only merges')),
3290 ('u', 'user', [], _('revisions committed by user')),
3294 ('u', 'user', [], _('revisions committed by user')),
3291 ('b', 'only-branch', [],
3295 ('b', 'only-branch', [],
3292 _('show only changesets within the given named branch')),
3296 _('show only changesets within the given named branch')),
3293 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3297 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3294 ] + logopts + walkopts,
3298 ] + logopts + walkopts,
3295 _('[OPTION]... [FILE]')),
3299 _('[OPTION]... [FILE]')),
3296 "manifest":
3300 "manifest":
3297 (manifest,
3301 (manifest,
3298 [('r', 'rev', '', _('revision to display'))],
3302 [('r', 'rev', '', _('revision to display'))],
3299 _('[-r REV]')),
3303 _('[-r REV]')),
3300 "^merge":
3304 "^merge":
3301 (merge,
3305 (merge,
3302 [('f', 'force', None, _('force a merge with outstanding changes')),
3306 [('f', 'force', None, _('force a merge with outstanding changes')),
3303 ('r', 'rev', '', _('revision to merge')),
3307 ('r', 'rev', '', _('revision to merge')),
3304 ('S', 'show', None,
3308 ('S', 'show', None,
3305 _('review revisions to merge (no merge is performed)'))],
3309 _('review revisions to merge (no merge is performed)'))],
3306 _('[-f] [[-r] REV]')),
3310 _('[-f] [[-r] REV]')),
3307 "outgoing|out":
3311 "outgoing|out":
3308 (outgoing,
3312 (outgoing,
3309 [('f', 'force', None,
3313 [('f', 'force', None,
3310 _('run even when remote repository is unrelated')),
3314 _('run even when remote repository is unrelated')),
3311 ('r', 'rev', [],
3315 ('r', 'rev', [],
3312 _('a specific revision up to which you would like to push')),
3316 _('a specific revision up to which you would like to push')),
3313 ('n', 'newest-first', None, _('show newest record first')),
3317 ('n', 'newest-first', None, _('show newest record first')),
3314 ] + logopts + remoteopts,
3318 ] + logopts + remoteopts,
3315 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3319 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3316 "^parents":
3320 "^parents":
3317 (parents,
3321 (parents,
3318 [('r', 'rev', '', _('show parents from the specified revision')),
3322 [('r', 'rev', '', _('show parents from the specified revision')),
3319 ] + templateopts,
3323 ] + templateopts,
3320 _('hg parents [-r REV] [FILE]')),
3324 _('hg parents [-r REV] [FILE]')),
3321 "paths": (paths, [], _('[NAME]')),
3325 "paths": (paths, [], _('[NAME]')),
3322 "^pull":
3326 "^pull":
3323 (pull,
3327 (pull,
3324 [('u', 'update', None,
3328 [('u', 'update', None,
3325 _('update to new tip if changesets were pulled')),
3329 _('update to new tip if changesets were pulled')),
3326 ('f', 'force', None,
3330 ('f', 'force', None,
3327 _('run even when remote repository is unrelated')),
3331 _('run even when remote repository is unrelated')),
3328 ('r', 'rev', [],
3332 ('r', 'rev', [],
3329 _('a specific revision up to which you would like to pull')),
3333 _('a specific revision up to which you would like to pull')),
3330 ] + remoteopts,
3334 ] + remoteopts,
3331 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3335 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3332 "^push":
3336 "^push":
3333 (push,
3337 (push,
3334 [('f', 'force', None, _('force push')),
3338 [('f', 'force', None, _('force push')),
3335 ('r', 'rev', [],
3339 ('r', 'rev', [],
3336 _('a specific revision up to which you would like to push')),
3340 _('a specific revision up to which you would like to push')),
3337 ] + remoteopts,
3341 ] + remoteopts,
3338 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3342 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3339 "recover": (recover, []),
3343 "recover": (recover, []),
3340 "^remove|rm":
3344 "^remove|rm":
3341 (remove,
3345 (remove,
3342 [('A', 'after', None, _('record delete for missing files')),
3346 [('A', 'after', None, _('record delete for missing files')),
3343 ('f', 'force', None,
3347 ('f', 'force', None,
3344 _('remove (and delete) file even if added or modified')),
3348 _('remove (and delete) file even if added or modified')),
3345 ] + walkopts,
3349 ] + walkopts,
3346 _('[OPTION]... FILE...')),
3350 _('[OPTION]... FILE...')),
3347 "rename|mv":
3351 "rename|mv":
3348 (rename,
3352 (rename,
3349 [('A', 'after', None, _('record a rename that has already occurred')),
3353 [('A', 'after', None, _('record a rename that has already occurred')),
3350 ('f', 'force', None,
3354 ('f', 'force', None,
3351 _('forcibly copy over an existing managed file')),
3355 _('forcibly copy over an existing managed file')),
3352 ] + walkopts + dryrunopts,
3356 ] + walkopts + dryrunopts,
3353 _('[OPTION]... SOURCE... DEST')),
3357 _('[OPTION]... SOURCE... DEST')),
3354 "resolve":
3358 "resolve":
3355 (resolve,
3359 (resolve,
3356 [('a', 'all', None, _('remerge all unresolved files')),
3360 [('a', 'all', None, _('remerge all unresolved files')),
3357 ('l', 'list', None, _('list state of files needing merge')),
3361 ('l', 'list', None, _('list state of files needing merge')),
3358 ('m', 'mark', None, _('mark files as resolved')),
3362 ('m', 'mark', None, _('mark files as resolved')),
3359 ('u', 'unmark', None, _('unmark files as resolved'))]
3363 ('u', 'unmark', None, _('unmark files as resolved'))]
3360 + walkopts,
3364 + walkopts,
3361 _('[OPTION]... [FILE]...')),
3365 _('[OPTION]... [FILE]...')),
3362 "revert":
3366 "revert":
3363 (revert,
3367 (revert,
3364 [('a', 'all', None, _('revert all changes when no arguments given')),
3368 [('a', 'all', None, _('revert all changes when no arguments given')),
3365 ('d', 'date', '', _('tipmost revision matching date')),
3369 ('d', 'date', '', _('tipmost revision matching date')),
3366 ('r', 'rev', '', _('revision to revert to')),
3370 ('r', 'rev', '', _('revision to revert to')),
3367 ('', 'no-backup', None, _('do not save backup copies of files')),
3371 ('', 'no-backup', None, _('do not save backup copies of files')),
3368 ] + walkopts + dryrunopts,
3372 ] + walkopts + dryrunopts,
3369 _('[OPTION]... [-r REV] [NAME]...')),
3373 _('[OPTION]... [-r REV] [NAME]...')),
3370 "rollback": (rollback, []),
3374 "rollback": (rollback, []),
3371 "root": (root, []),
3375 "root": (root, []),
3372 "^serve":
3376 "^serve":
3373 (serve,
3377 (serve,
3374 [('A', 'accesslog', '', _('name of access log file to write to')),
3378 [('A', 'accesslog', '', _('name of access log file to write to')),
3375 ('d', 'daemon', None, _('run server in background')),
3379 ('d', 'daemon', None, _('run server in background')),
3376 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3380 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3377 ('E', 'errorlog', '', _('name of error log file to write to')),
3381 ('E', 'errorlog', '', _('name of error log file to write to')),
3378 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3382 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3379 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3383 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3380 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3384 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3381 ('n', 'name', '',
3385 ('n', 'name', '',
3382 _('name to show in web pages (default: working directory)')),
3386 _('name to show in web pages (default: working directory)')),
3383 ('', 'webdir-conf', '', _('name of the webdir config file'
3387 ('', 'webdir-conf', '', _('name of the webdir config file'
3384 ' (serve more than one repository)')),
3388 ' (serve more than one repository)')),
3385 ('', 'pid-file', '', _('name of file to write process ID to')),
3389 ('', 'pid-file', '', _('name of file to write process ID to')),
3386 ('', 'stdio', None, _('for remote clients')),
3390 ('', 'stdio', None, _('for remote clients')),
3387 ('t', 'templates', '', _('web templates to use')),
3391 ('t', 'templates', '', _('web templates to use')),
3388 ('', 'style', '', _('template style to use')),
3392 ('', 'style', '', _('template style to use')),
3389 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3393 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3390 ('', 'certificate', '', _('SSL certificate file'))],
3394 ('', 'certificate', '', _('SSL certificate file'))],
3391 _('[OPTION]...')),
3395 _('[OPTION]...')),
3392 "showconfig|debugconfig":
3396 "showconfig|debugconfig":
3393 (showconfig,
3397 (showconfig,
3394 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3398 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3395 _('[-u] [NAME]...')),
3399 _('[-u] [NAME]...')),
3396 "^status|st":
3400 "^status|st":
3397 (status,
3401 (status,
3398 [('A', 'all', None, _('show status of all files')),
3402 [('A', 'all', None, _('show status of all files')),
3399 ('m', 'modified', None, _('show only modified files')),
3403 ('m', 'modified', None, _('show only modified files')),
3400 ('a', 'added', None, _('show only added files')),
3404 ('a', 'added', None, _('show only added files')),
3401 ('r', 'removed', None, _('show only removed files')),
3405 ('r', 'removed', None, _('show only removed files')),
3402 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3406 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3403 ('c', 'clean', None, _('show only files without changes')),
3407 ('c', 'clean', None, _('show only files without changes')),
3404 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3408 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3405 ('i', 'ignored', None, _('show only ignored files')),
3409 ('i', 'ignored', None, _('show only ignored files')),
3406 ('n', 'no-status', None, _('hide status prefix')),
3410 ('n', 'no-status', None, _('hide status prefix')),
3407 ('C', 'copies', None, _('show source of copied files')),
3411 ('C', 'copies', None, _('show source of copied files')),
3408 ('0', 'print0', None,
3412 ('0', 'print0', None,
3409 _('end filenames with NUL, for use with xargs')),
3413 _('end filenames with NUL, for use with xargs')),
3410 ('', 'rev', [], _('show difference from revision')),
3414 ('', 'rev', [], _('show difference from revision')),
3411 ] + walkopts,
3415 ] + walkopts,
3412 _('[OPTION]... [FILE]...')),
3416 _('[OPTION]... [FILE]...')),
3413 "tag":
3417 "tag":
3414 (tag,
3418 (tag,
3415 [('f', 'force', None, _('replace existing tag')),
3419 [('f', 'force', None, _('replace existing tag')),
3416 ('l', 'local', None, _('make the tag local')),
3420 ('l', 'local', None, _('make the tag local')),
3417 ('r', 'rev', '', _('revision to tag')),
3421 ('r', 'rev', '', _('revision to tag')),
3418 ('', 'remove', None, _('remove a tag')),
3422 ('', 'remove', None, _('remove a tag')),
3419 # -l/--local is already there, commitopts cannot be used
3423 # -l/--local is already there, commitopts cannot be used
3420 ('m', 'message', '', _('use <text> as commit message')),
3424 ('m', 'message', '', _('use <text> as commit message')),
3421 ] + commitopts2,
3425 ] + commitopts2,
3422 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3426 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3423 "tags": (tags, []),
3427 "tags": (tags, []),
3424 "tip":
3428 "tip":
3425 (tip,
3429 (tip,
3426 [('p', 'patch', None, _('show patch')),
3430 [('p', 'patch', None, _('show patch')),
3427 ('g', 'git', None, _('use git extended diff format')),
3431 ('g', 'git', None, _('use git extended diff format')),
3428 ] + templateopts,
3432 ] + templateopts,
3429 _('[-p]')),
3433 _('[-p]')),
3430 "unbundle":
3434 "unbundle":
3431 (unbundle,
3435 (unbundle,
3432 [('u', 'update', None,
3436 [('u', 'update', None,
3433 _('update to new tip if changesets were unbundled'))],
3437 _('update to new tip if changesets were unbundled'))],
3434 _('[-u] FILE...')),
3438 _('[-u] FILE...')),
3435 "^update|up|checkout|co":
3439 "^update|up|checkout|co":
3436 (update,
3440 (update,
3437 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3441 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3438 ('d', 'date', '', _('tipmost revision matching date')),
3442 ('d', 'date', '', _('tipmost revision matching date')),
3439 ('r', 'rev', '', _('revision'))],
3443 ('r', 'rev', '', _('revision'))],
3440 _('[-C] [-d DATE] [[-r] REV]')),
3444 _('[-C] [-d DATE] [[-r] REV]')),
3441 "verify": (verify, []),
3445 "verify": (verify, []),
3442 "version": (version_, []),
3446 "version": (version_, []),
3443 }
3447 }
3444
3448
3445 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3449 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3446 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3450 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3447 optionalrepo = ("identify paths serve showconfig debugancestor")
3451 optionalrepo = ("identify paths serve showconfig debugancestor")
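Each entry in the table above maps a command name (with optional "|"-separated aliases, and a leading "^" for commands listed in the short help) to a tuple of the implementing function, a list of (short, long, default, help) option tuples, and a synopsis string; norepo and optionalrepo name the commands that need no repository or only optionally use one. As a rough sketch under those conventions, an extension of the same vintage could expose a command with the same shape — the "hello" command, its option and the module layout below are invented for illustration:

from mercurial.i18n import _

def hello(ui, repo, *names, **opts):
    """print a greeting for each NAME (illustrative only)"""
    for name in names or ['world']:
        if opts.get('shout'):
            name = name.upper()
        ui.write("hello, %s\n" % name)

cmdtable = {
    # a leading "^" would list the command in the short help, as with "^diff"
    "hello": (hello,
              [('s', 'shout', None, _('print the greeting in upper case'))],
              _('[-s] [NAME]...')),
}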
@@ -1,2133 +1,2104 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import util, extensions, hook, error
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16 from lock import release
16 from lock import release
17 import weakref, stat, errno, os, time, inspect
17 import weakref, stat, errno, os, time, inspect
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 class localrepository(repo.repository):
20 class localrepository(repo.repository):
21 capabilities = set(('lookup', 'changegroupsubset'))
21 capabilities = set(('lookup', 'changegroupsubset'))
22 supported = set('revlogv1 store fncache'.split())
22 supported = set('revlogv1 store fncache'.split())
23
23
24 def __init__(self, baseui, path=None, create=0):
24 def __init__(self, baseui, path=None, create=0):
25 repo.repository.__init__(self)
25 repo.repository.__init__(self)
26 self.root = os.path.realpath(path)
26 self.root = os.path.realpath(path)
27 self.path = os.path.join(self.root, ".hg")
27 self.path = os.path.join(self.root, ".hg")
28 self.origroot = path
28 self.origroot = path
29 self.opener = util.opener(self.path)
29 self.opener = util.opener(self.path)
30 self.wopener = util.opener(self.root)
30 self.wopener = util.opener(self.root)
31
31
32 if not os.path.isdir(self.path):
32 if not os.path.isdir(self.path):
33 if create:
33 if create:
34 if not os.path.exists(path):
34 if not os.path.exists(path):
35 os.mkdir(path)
35 os.mkdir(path)
36 os.mkdir(self.path)
36 os.mkdir(self.path)
37 requirements = ["revlogv1"]
37 requirements = ["revlogv1"]
38 if baseui.configbool('format', 'usestore', True):
38 if baseui.configbool('format', 'usestore', True):
39 os.mkdir(os.path.join(self.path, "store"))
39 os.mkdir(os.path.join(self.path, "store"))
40 requirements.append("store")
40 requirements.append("store")
41 if baseui.configbool('format', 'usefncache', True):
41 if baseui.configbool('format', 'usefncache', True):
42 requirements.append("fncache")
42 requirements.append("fncache")
43 # create an invalid changelog
43 # create an invalid changelog
44 self.opener("00changelog.i", "a").write(
44 self.opener("00changelog.i", "a").write(
45 '\0\0\0\2' # represents revlogv2
45 '\0\0\0\2' # represents revlogv2
46 ' dummy changelog to prevent using the old repo layout'
46 ' dummy changelog to prevent using the old repo layout'
47 )
47 )
48 reqfile = self.opener("requires", "w")
48 reqfile = self.opener("requires", "w")
49 for r in requirements:
49 for r in requirements:
50 reqfile.write("%s\n" % r)
50 reqfile.write("%s\n" % r)
51 reqfile.close()
51 reqfile.close()
52 else:
52 else:
53 raise error.RepoError(_("repository %s not found") % path)
53 raise error.RepoError(_("repository %s not found") % path)
54 elif create:
54 elif create:
55 raise error.RepoError(_("repository %s already exists") % path)
55 raise error.RepoError(_("repository %s already exists") % path)
56 else:
56 else:
57 # find requirements
57 # find requirements
58 requirements = set()
58 requirements = set()
59 try:
59 try:
60 requirements = set(self.opener("requires").read().splitlines())
60 requirements = set(self.opener("requires").read().splitlines())
61 except IOError, inst:
61 except IOError, inst:
62 if inst.errno != errno.ENOENT:
62 if inst.errno != errno.ENOENT:
63 raise
63 raise
64 for r in requirements - self.supported:
64 for r in requirements - self.supported:
65 raise error.RepoError(_("requirement '%s' not supported") % r)
65 raise error.RepoError(_("requirement '%s' not supported") % r)
66
66
67 self.store = store.store(requirements, self.path, util.opener)
67 self.store = store.store(requirements, self.path, util.opener)
68 self.spath = self.store.path
68 self.spath = self.store.path
69 self.sopener = self.store.opener
69 self.sopener = self.store.opener
70 self.sjoin = self.store.join
70 self.sjoin = self.store.join
71 self.opener.createmode = self.store.createmode
71 self.opener.createmode = self.store.createmode
72
72
73 self.baseui = baseui
73 self.baseui = baseui
74 self.ui = baseui.copy()
74 self.ui = baseui.copy()
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self._ubranchcache = None # UTF-8 version of branchcache
84 self._ubranchcache = None # UTF-8 version of branchcache
85 self._branchcachetip = None
85 self._branchcachetip = None
86 self.nodetagscache = None
86 self.nodetagscache = None
87 self.filterpats = {}
87 self.filterpats = {}
88 self._datafilters = {}
88 self._datafilters = {}
89 self._transref = self._lockref = self._wlockref = None
89 self._transref = self._lockref = self._wlockref = None
90
90
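Opening an existing repository above reads .hg/requires and aborts on any requirement outside the supported set ('revlogv1', 'store', 'fncache'); a missing file simply means the old layout. A standalone sketch of that check, with the .hg directory path passed in and the supported set copied from the class attribute:

import errno, os

SUPPORTED = set('revlogv1 store fncache'.split())

def check_requirements(hgdir):
    # returns the set of requirements, or an empty set for old layouts
    try:
        fp = open(os.path.join(hgdir, 'requires'))
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise
        return set()
    requirements = set(fp.read().splitlines())
    fp.close()
    for r in requirements - SUPPORTED:
        raise ValueError("requirement '%s' not supported" % r)
    return requirements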
91 @propertycache
91 @propertycache
92 def changelog(self):
92 def changelog(self):
93 c = changelog.changelog(self.sopener)
93 c = changelog.changelog(self.sopener)
94 if 'HG_PENDING' in os.environ:
94 if 'HG_PENDING' in os.environ:
95 p = os.environ['HG_PENDING']
95 p = os.environ['HG_PENDING']
96 if p.startswith(self.root):
96 if p.startswith(self.root):
97 c.readpending('00changelog.i.a')
97 c.readpending('00changelog.i.a')
98 self.sopener.defversion = c.version
98 self.sopener.defversion = c.version
99 return c
99 return c
100
100
101 @propertycache
101 @propertycache
102 def manifest(self):
102 def manifest(self):
103 return manifest.manifest(self.sopener)
103 return manifest.manifest(self.sopener)
104
104
105 @propertycache
105 @propertycache
106 def dirstate(self):
106 def dirstate(self):
107 return dirstate.dirstate(self.opener, self.ui, self.root)
107 return dirstate.dirstate(self.opener, self.ui, self.root)
108
108
109 def __getitem__(self, changeid):
109 def __getitem__(self, changeid):
110 if changeid == None:
110 if changeid == None:
111 return context.workingctx(self)
111 return context.workingctx(self)
112 return context.changectx(self, changeid)
112 return context.changectx(self, changeid)
113
113
114 def __nonzero__(self):
114 def __nonzero__(self):
115 return True
115 return True
116
116
117 def __len__(self):
117 def __len__(self):
118 return len(self.changelog)
118 return len(self.changelog)
119
119
120 def __iter__(self):
120 def __iter__(self):
121 for i in xrange(len(self)):
121 for i in xrange(len(self)):
122 yield i
122 yield i
123
123
124 def url(self):
124 def url(self):
125 return 'file:' + self.root
125 return 'file:' + self.root
126
126
127 def hook(self, name, throw=False, **args):
127 def hook(self, name, throw=False, **args):
128 return hook.hook(self.ui, self, name, throw, **args)
128 return hook.hook(self.ui, self, name, throw, **args)
129
129
130 tag_disallowed = ':\r\n'
130 tag_disallowed = ':\r\n'
131
131
132 def _tag(self, names, node, message, local, user, date, extra={}):
132 def _tag(self, names, node, message, local, user, date, extra={}):
133 if isinstance(names, str):
133 if isinstance(names, str):
134 allchars = names
134 allchars = names
135 names = (names,)
135 names = (names,)
136 else:
136 else:
137 allchars = ''.join(names)
137 allchars = ''.join(names)
138 for c in self.tag_disallowed:
138 for c in self.tag_disallowed:
139 if c in allchars:
139 if c in allchars:
140 raise util.Abort(_('%r cannot be used in a tag name') % c)
140 raise util.Abort(_('%r cannot be used in a tag name') % c)
141
141
142 for name in names:
142 for name in names:
143 self.hook('pretag', throw=True, node=hex(node), tag=name,
143 self.hook('pretag', throw=True, node=hex(node), tag=name,
144 local=local)
144 local=local)
145
145
146 def writetags(fp, names, munge, prevtags):
146 def writetags(fp, names, munge, prevtags):
147 fp.seek(0, 2)
147 fp.seek(0, 2)
148 if prevtags and prevtags[-1] != '\n':
148 if prevtags and prevtags[-1] != '\n':
149 fp.write('\n')
149 fp.write('\n')
150 for name in names:
150 for name in names:
151 m = munge and munge(name) or name
151 m = munge and munge(name) or name
152 if self._tagstypecache and name in self._tagstypecache:
152 if self._tagstypecache and name in self._tagstypecache:
153 old = self.tagscache.get(name, nullid)
153 old = self.tagscache.get(name, nullid)
154 fp.write('%s %s\n' % (hex(old), m))
154 fp.write('%s %s\n' % (hex(old), m))
155 fp.write('%s %s\n' % (hex(node), m))
155 fp.write('%s %s\n' % (hex(node), m))
156 fp.close()
156 fp.close()
157
157
158 prevtags = ''
158 prevtags = ''
159 if local:
159 if local:
160 try:
160 try:
161 fp = self.opener('localtags', 'r+')
161 fp = self.opener('localtags', 'r+')
162 except IOError:
162 except IOError:
163 fp = self.opener('localtags', 'a')
163 fp = self.opener('localtags', 'a')
164 else:
164 else:
165 prevtags = fp.read()
165 prevtags = fp.read()
166
166
167 # local tags are stored in the current charset
167 # local tags are stored in the current charset
168 writetags(fp, names, None, prevtags)
168 writetags(fp, names, None, prevtags)
169 for name in names:
169 for name in names:
170 self.hook('tag', node=hex(node), tag=name, local=local)
170 self.hook('tag', node=hex(node), tag=name, local=local)
171 return
171 return
172
172
173 try:
173 try:
174 fp = self.wfile('.hgtags', 'rb+')
174 fp = self.wfile('.hgtags', 'rb+')
175 except IOError:
175 except IOError:
176 fp = self.wfile('.hgtags', 'ab')
176 fp = self.wfile('.hgtags', 'ab')
177 else:
177 else:
178 prevtags = fp.read()
178 prevtags = fp.read()
179
179
180 # committed tags are stored in UTF-8
180 # committed tags are stored in UTF-8
181 writetags(fp, names, encoding.fromlocal, prevtags)
181 writetags(fp, names, encoding.fromlocal, prevtags)
182
182
183 if '.hgtags' not in self.dirstate:
183 if '.hgtags' not in self.dirstate:
184 self.add(['.hgtags'])
184 self.add(['.hgtags'])
185
185
186 tagnode = self.commit(['.hgtags'], message, user, date, extra=extra)
186 tagnode = self.commit(['.hgtags'], message, user, date, extra=extra)
187
187
188 for name in names:
188 for name in names:
189 self.hook('tag', node=hex(node), tag=name, local=local)
189 self.hook('tag', node=hex(node), tag=name, local=local)
190
190
191 return tagnode
191 return tagnode
192
192
193 def tag(self, names, node, message, local, user, date):
193 def tag(self, names, node, message, local, user, date):
194 '''tag a revision with one or more symbolic names.
194 '''tag a revision with one or more symbolic names.
195
195
196 names is a list of strings or, when adding a single tag, names may be a
196 names is a list of strings or, when adding a single tag, names may be a
197 string.
197 string.
198
198
199 if local is True, the tags are stored in a per-repository file.
199 if local is True, the tags are stored in a per-repository file.
200 otherwise, they are stored in the .hgtags file, and a new
200 otherwise, they are stored in the .hgtags file, and a new
201 changeset is committed with the change.
201 changeset is committed with the change.
202
202
203 keyword arguments:
203 keyword arguments:
204
204
205 local: whether to store tags in non-version-controlled file
205 local: whether to store tags in non-version-controlled file
206 (default False)
206 (default False)
207
207
208 message: commit message to use if committing
208 message: commit message to use if committing
209
209
210 user: name of user to use if committing
210 user: name of user to use if committing
211
211
212 date: date tuple to use if committing'''
212 date: date tuple to use if committing'''
213
213
214 for x in self.status()[:5]:
214 for x in self.status()[:5]:
215 if '.hgtags' in x:
215 if '.hgtags' in x:
216 raise util.Abort(_('working copy of .hgtags is changed '
216 raise util.Abort(_('working copy of .hgtags is changed '
217 '(please commit .hgtags manually)'))
217 '(please commit .hgtags manually)'))
218
218
219 self.tags() # instantiate the cache
219 self.tags() # instantiate the cache
220 self._tag(names, node, message, local, user, date)
220 self._tag(names, node, message, local, user, date)
221
221
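A hedged usage sketch for the tagging API defined above, assuming a non-empty repository in the current directory and that mercurial.hg.repository() and mercurial.util.makedate() are available as in this release; tag() receives the name(s), the node to tag, a message, the local flag, a user string and a (unixtime, offset) date tuple:

from mercurial import ui, hg, util

u = ui.ui()
repo = hg.repository(u, '.')

node = repo['tip'].node()          # changeid lookup via __getitem__ above
repo.tag('sketch-tag', node, 'tag tip for illustration',
         True,                     # local: stored in .hg/localtags, no commit
         'example user <user@example.com>',
         util.makedate())
# a fresh repository object re-reads the tag files
print hg.repository(u, '.').tags().get('sketch-tag') == node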
222 def tags(self):
222 def tags(self):
223 '''return a mapping of tag to node'''
223 '''return a mapping of tag to node'''
224 if self.tagscache:
224 if self.tagscache:
225 return self.tagscache
225 return self.tagscache
226
226
227 globaltags = {}
227 globaltags = {}
228 tagtypes = {}
228 tagtypes = {}
229
229
230 def readtags(lines, fn, tagtype):
230 def readtags(lines, fn, tagtype):
231 filetags = {}
231 filetags = {}
232 count = 0
232 count = 0
233
233
234 def warn(msg):
234 def warn(msg):
235 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
235 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
236
236
237 for l in lines:
237 for l in lines:
238 count += 1
238 count += 1
239 if not l:
239 if not l:
240 continue
240 continue
241 s = l.split(" ", 1)
241 s = l.split(" ", 1)
242 if len(s) != 2:
242 if len(s) != 2:
243 warn(_("cannot parse entry"))
243 warn(_("cannot parse entry"))
244 continue
244 continue
245 node, key = s
245 node, key = s
246 key = encoding.tolocal(key.strip()) # stored in UTF-8
246 key = encoding.tolocal(key.strip()) # stored in UTF-8
247 try:
247 try:
248 bin_n = bin(node)
248 bin_n = bin(node)
249 except TypeError:
249 except TypeError:
250 warn(_("node '%s' is not well formed") % node)
250 warn(_("node '%s' is not well formed") % node)
251 continue
251 continue
252 if bin_n not in self.changelog.nodemap:
252 if bin_n not in self.changelog.nodemap:
253 warn(_("tag '%s' refers to unknown node") % key)
253 warn(_("tag '%s' refers to unknown node") % key)
254 continue
254 continue
255
255
256 h = []
256 h = []
257 if key in filetags:
257 if key in filetags:
258 n, h = filetags[key]
258 n, h = filetags[key]
259 h.append(n)
259 h.append(n)
260 filetags[key] = (bin_n, h)
260 filetags[key] = (bin_n, h)
261
261
262 for k, nh in filetags.iteritems():
262 for k, nh in filetags.iteritems():
263 if k not in globaltags:
263 if k not in globaltags:
264 globaltags[k] = nh
264 globaltags[k] = nh
265 tagtypes[k] = tagtype
265 tagtypes[k] = tagtype
266 continue
266 continue
267
267
268 # we prefer the global tag if:
268 # we prefer the global tag if:
269 # it supersedes us OR
269 # it supersedes us OR
270 # mutual supersedes and it has a higher rank
270 # mutual supersedes and it has a higher rank
271 # otherwise we win because we're tip-most
271 # otherwise we win because we're tip-most
272 an, ah = nh
272 an, ah = nh
273 bn, bh = globaltags[k]
273 bn, bh = globaltags[k]
274 if (bn != an and an in bh and
274 if (bn != an and an in bh and
275 (bn not in ah or len(bh) > len(ah))):
275 (bn not in ah or len(bh) > len(ah))):
276 an = bn
276 an = bn
277 ah.extend([n for n in bh if n not in ah])
277 ah.extend([n for n in bh if n not in ah])
278 globaltags[k] = an, ah
278 globaltags[k] = an, ah
279 tagtypes[k] = tagtype
279 tagtypes[k] = tagtype
280
280
281 # read the tags file from each head, ending with the tip
281 # read the tags file from each head, ending with the tip
282 f = None
282 f = None
283 for rev, node, fnode in self._hgtagsnodes():
283 for rev, node, fnode in self._hgtagsnodes():
284 f = (f and f.filectx(fnode) or
284 f = (f and f.filectx(fnode) or
285 self.filectx('.hgtags', fileid=fnode))
285 self.filectx('.hgtags', fileid=fnode))
286 readtags(f.data().splitlines(), f, "global")
286 readtags(f.data().splitlines(), f, "global")
287
287
288 try:
288 try:
289 data = encoding.fromlocal(self.opener("localtags").read())
289 data = encoding.fromlocal(self.opener("localtags").read())
290 # localtags are stored in the local character set
290 # localtags are stored in the local character set
291 # while the internal tag table is stored in UTF-8
291 # while the internal tag table is stored in UTF-8
292 readtags(data.splitlines(), "localtags", "local")
292 readtags(data.splitlines(), "localtags", "local")
293 except IOError:
293 except IOError:
294 pass
294 pass
295
295
296 self.tagscache = {}
296 self.tagscache = {}
297 self._tagstypecache = {}
297 self._tagstypecache = {}
298 for k, nh in globaltags.iteritems():
298 for k, nh in globaltags.iteritems():
299 n = nh[0]
299 n = nh[0]
300 if n != nullid:
300 if n != nullid:
301 self.tagscache[k] = n
301 self.tagscache[k] = n
302 self._tagstypecache[k] = tagtypes[k]
302 self._tagstypecache[k] = tagtypes[k]
303 self.tagscache['tip'] = self.changelog.tip()
303 self.tagscache['tip'] = self.changelog.tip()
304 return self.tagscache
304 return self.tagscache
305
305
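Both .hgtags and .hg/localtags, read by readtags() above, hold one "<hexnode> <tagname>" pair per line, with later lines overriding earlier ones for the same name. A standalone sketch of that parsing (the head-ranking logic above is intentionally left out, and the sample nodes are made up):

sample = (
    "0123456789abcdef0123456789abcdef01234567 v1.0\n"
    "\n"
    "not-a-valid-entry\n"
    "fedcba9876543210fedcba9876543210fedcba98 v1.1\n"
)

tags = {}
count = 0
for line in sample.splitlines():
    count += 1
    if not line:
        continue
    fields = line.split(" ", 1)
    if len(fields) != 2:
        print "line %d: cannot parse entry" % count
        continue
    node, name = fields
    tags[name.strip()] = node       # last writer wins for a given name

print sorted(tags.keys()) == ['v1.0', 'v1.1']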
306 def tagtype(self, tagname):
306 def tagtype(self, tagname):
307 '''
307 '''
308 return the type of the given tag. result can be:
308 return the type of the given tag. result can be:
309
309
310 'local' : a local tag
310 'local' : a local tag
311 'global' : a global tag
311 'global' : a global tag
312 None : tag does not exist
312 None : tag does not exist
313 '''
313 '''
314
314
315 self.tags()
315 self.tags()
316
316
317 return self._tagstypecache.get(tagname)
317 return self._tagstypecache.get(tagname)
318
318
319 def _hgtagsnodes(self):
319 def _hgtagsnodes(self):
320 last = {}
320 last = {}
321 ret = []
321 ret = []
322 for node in reversed(self.heads()):
322 for node in reversed(self.heads()):
323 c = self[node]
323 c = self[node]
324 rev = c.rev()
324 rev = c.rev()
325 try:
325 try:
326 fnode = c.filenode('.hgtags')
326 fnode = c.filenode('.hgtags')
327 except error.LookupError:
327 except error.LookupError:
328 continue
328 continue
329 ret.append((rev, node, fnode))
329 ret.append((rev, node, fnode))
330 if fnode in last:
330 if fnode in last:
331 ret[last[fnode]] = None
331 ret[last[fnode]] = None
332 last[fnode] = len(ret) - 1
332 last[fnode] = len(ret) - 1
333 return [item for item in ret if item]
333 return [item for item in ret if item]
334
334
335 def tagslist(self):
335 def tagslist(self):
336 '''return a list of tags ordered by revision'''
336 '''return a list of tags ordered by revision'''
337 l = []
337 l = []
338 for t, n in self.tags().iteritems():
338 for t, n in self.tags().iteritems():
339 try:
339 try:
340 r = self.changelog.rev(n)
340 r = self.changelog.rev(n)
341 except:
341 except:
342 r = -2 # sort to the beginning of the list if unknown
342 r = -2 # sort to the beginning of the list if unknown
343 l.append((r, t, n))
343 l.append((r, t, n))
344 return [(t, n) for r, t, n in sorted(l)]
344 return [(t, n) for r, t, n in sorted(l)]
345
345
346 def nodetags(self, node):
346 def nodetags(self, node):
347 '''return the tags associated with a node'''
347 '''return the tags associated with a node'''
348 if not self.nodetagscache:
348 if not self.nodetagscache:
349 self.nodetagscache = {}
349 self.nodetagscache = {}
350 for t, n in self.tags().iteritems():
350 for t, n in self.tags().iteritems():
351 self.nodetagscache.setdefault(n, []).append(t)
351 self.nodetagscache.setdefault(n, []).append(t)
352 return self.nodetagscache.get(node, [])
352 return self.nodetagscache.get(node, [])
353
353
354 def _branchtags(self, partial, lrev):
354 def _branchtags(self, partial, lrev):
355 # TODO: rename this function?
355 # TODO: rename this function?
356 tiprev = len(self) - 1
356 tiprev = len(self) - 1
357 if lrev != tiprev:
357 if lrev != tiprev:
358 self._updatebranchcache(partial, lrev+1, tiprev+1)
358 self._updatebranchcache(partial, lrev+1, tiprev+1)
359 self._writebranchcache(partial, self.changelog.tip(), tiprev)
359 self._writebranchcache(partial, self.changelog.tip(), tiprev)
360
360
361 return partial
361 return partial
362
362
363 def _branchheads(self):
363 def _branchheads(self):
364 tip = self.changelog.tip()
364 tip = self.changelog.tip()
365 if self.branchcache is not None and self._branchcachetip == tip:
365 if self.branchcache is not None and self._branchcachetip == tip:
366 return self.branchcache
366 return self.branchcache
367
367
368 oldtip = self._branchcachetip
368 oldtip = self._branchcachetip
369 self._branchcachetip = tip
369 self._branchcachetip = tip
370 if self.branchcache is None:
370 if self.branchcache is None:
371 self.branchcache = {} # avoid recursion in changectx
371 self.branchcache = {} # avoid recursion in changectx
372 else:
372 else:
373 self.branchcache.clear() # keep using the same dict
373 self.branchcache.clear() # keep using the same dict
374 if oldtip is None or oldtip not in self.changelog.nodemap:
374 if oldtip is None or oldtip not in self.changelog.nodemap:
375 partial, last, lrev = self._readbranchcache()
375 partial, last, lrev = self._readbranchcache()
376 else:
376 else:
377 lrev = self.changelog.rev(oldtip)
377 lrev = self.changelog.rev(oldtip)
378 partial = self._ubranchcache
378 partial = self._ubranchcache
379
379
380 self._branchtags(partial, lrev)
380 self._branchtags(partial, lrev)
381 # this private cache holds all heads (not just tips)
381 # this private cache holds all heads (not just tips)
382 self._ubranchcache = partial
382 self._ubranchcache = partial
383
383
384 # the branch cache is stored on disk as UTF-8, but in the local
384 # the branch cache is stored on disk as UTF-8, but in the local
385 # charset internally
385 # charset internally
386 for k, v in partial.iteritems():
386 for k, v in partial.iteritems():
387 self.branchcache[encoding.tolocal(k)] = v
387 self.branchcache[encoding.tolocal(k)] = v
388 return self.branchcache
388 return self.branchcache
389
389
390
390
391 def branchtags(self):
391 def branchtags(self):
392 '''return a dict where branch names map to the tipmost head of
392 '''return a dict where branch names map to the tipmost head of
393 the branch, open heads come before closed'''
393 the branch, open heads come before closed'''
394 bt = {}
394 bt = {}
395 for bn, heads in self._branchheads().iteritems():
395 for bn, heads in self._branchheads().iteritems():
396 head = None
396 head = None
397 for i in range(len(heads)-1, -1, -1):
397 for i in range(len(heads)-1, -1, -1):
398 h = heads[i]
398 h = heads[i]
399 if 'close' not in self.changelog.read(h)[5]:
399 if 'close' not in self.changelog.read(h)[5]:
400 head = h
400 head = h
401 break
401 break
402 # no open heads were found
402 # no open heads were found
403 if head is None:
403 if head is None:
404 head = heads[-1]
404 head = heads[-1]
405 bt[bn] = head
405 bt[bn] = head
406 return bt
406 return bt
407
407
408
408
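branchtags() above walks each branch's heads from the end and keeps the first head it finds whose changeset is not marked closed (the tipmost open head), falling back to the final head when every head is closed. The same selection on made-up (node, closed) pairs, as a standalone sketch:

def tipmost_open(heads):
    head = None
    for i in range(len(heads) - 1, -1, -1):
        node, closed = heads[i]
        if not closed:
            head = node
            break
    if head is None:                 # no open heads: fall back to the last one
        head = heads[-1][0]
    return head

branchheads = {
    'default': [('n1', False), ('n2', False)],
    'stable':  [('n3', False), ('n4', True)],    # n4 is closed, so n3 wins
}
bt = dict((bn, tipmost_open(heads)) for bn, heads in branchheads.items())
print bt == {'default': 'n2', 'stable': 'n3'}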
409 def _readbranchcache(self):
409 def _readbranchcache(self):
410 partial = {}
410 partial = {}
411 try:
411 try:
412 f = self.opener("branchheads.cache")
412 f = self.opener("branchheads.cache")
413 lines = f.read().split('\n')
413 lines = f.read().split('\n')
414 f.close()
414 f.close()
415 except (IOError, OSError):
415 except (IOError, OSError):
416 return {}, nullid, nullrev
416 return {}, nullid, nullrev
417
417
418 try:
418 try:
419 last, lrev = lines.pop(0).split(" ", 1)
419 last, lrev = lines.pop(0).split(" ", 1)
420 last, lrev = bin(last), int(lrev)
420 last, lrev = bin(last), int(lrev)
421 if lrev >= len(self) or self[lrev].node() != last:
421 if lrev >= len(self) or self[lrev].node() != last:
422 # invalidate the cache
422 # invalidate the cache
423 raise ValueError('invalidating branch cache (tip differs)')
423 raise ValueError('invalidating branch cache (tip differs)')
424 for l in lines:
424 for l in lines:
425 if not l: continue
425 if not l: continue
426 node, label = l.split(" ", 1)
426 node, label = l.split(" ", 1)
427 partial.setdefault(label.strip(), []).append(bin(node))
427 partial.setdefault(label.strip(), []).append(bin(node))
428 except KeyboardInterrupt:
428 except KeyboardInterrupt:
429 raise
429 raise
430 except Exception, inst:
430 except Exception, inst:
431 if self.ui.debugflag:
431 if self.ui.debugflag:
432 self.ui.warn(str(inst), '\n')
432 self.ui.warn(str(inst), '\n')
433 partial, last, lrev = {}, nullid, nullrev
433 partial, last, lrev = {}, nullid, nullrev
434 return partial, last, lrev
434 return partial, last, lrev
435
435
436 def _writebranchcache(self, branches, tip, tiprev):
436 def _writebranchcache(self, branches, tip, tiprev):
437 try:
437 try:
438 f = self.opener("branchheads.cache", "w", atomictemp=True)
438 f = self.opener("branchheads.cache", "w", atomictemp=True)
439 f.write("%s %s\n" % (hex(tip), tiprev))
439 f.write("%s %s\n" % (hex(tip), tiprev))
440 for label, nodes in branches.iteritems():
440 for label, nodes in branches.iteritems():
441 for node in nodes:
441 for node in nodes:
442 f.write("%s %s\n" % (hex(node), label))
442 f.write("%s %s\n" % (hex(node), label))
443 f.rename()
443 f.rename()
444 except (IOError, OSError):
444 except (IOError, OSError):
445 pass
445 pass
446
446
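The cache written by _writebranchcache() above is a plain text file: a "<tiphex> <tiprev>" header line followed by one "<headhex> <branchname>" line per head, which _readbranchcache() discards wholesale if the recorded tip no longer matches the repository. A standalone round-trip sketch of that layout (file name and node values made up):

def write_cache(path, tip, tiprev, branches):
    f = open(path, 'w')
    f.write("%s %s\n" % (tip, tiprev))
    for label, nodes in branches.items():
        for node in nodes:
            f.write("%s %s\n" % (node, label))
    f.close()

def read_cache(path):
    lines = open(path).read().split('\n')
    last, lrev = lines.pop(0).split(" ", 1)
    partial = {}
    for l in lines:
        if not l:
            continue
        node, label = l.split(" ", 1)
        partial.setdefault(label.strip(), []).append(node)
    return partial, last, int(lrev)

write_cache('branchheads.cache', 'aa' * 20, 42, {'default': ['bb' * 20]})
print read_cache('branchheads.cache') == ({'default': ['bb' * 20]}, 'aa' * 20, 42)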
447 def _updatebranchcache(self, partial, start, end):
447 def _updatebranchcache(self, partial, start, end):
448 for r in xrange(start, end):
448 for r in xrange(start, end):
449 c = self[r]
449 c = self[r]
450 b = c.branch()
450 b = c.branch()
451 bheads = partial.setdefault(b, [])
451 bheads = partial.setdefault(b, [])
452 bheads.append(c.node())
452 bheads.append(c.node())
453 for p in c.parents():
453 for p in c.parents():
454 pn = p.node()
454 pn = p.node()
455 if pn in bheads:
455 if pn in bheads:
456 bheads.remove(pn)
456 bheads.remove(pn)
457
457
458 def lookup(self, key):
458 def lookup(self, key):
459 if isinstance(key, int):
459 if isinstance(key, int):
460 return self.changelog.node(key)
460 return self.changelog.node(key)
461 elif key == '.':
461 elif key == '.':
462 return self.dirstate.parents()[0]
462 return self.dirstate.parents()[0]
463 elif key == 'null':
463 elif key == 'null':
464 return nullid
464 return nullid
465 elif key == 'tip':
465 elif key == 'tip':
466 return self.changelog.tip()
466 return self.changelog.tip()
467 n = self.changelog._match(key)
467 n = self.changelog._match(key)
468 if n:
468 if n:
469 return n
469 return n
470 if key in self.tags():
470 if key in self.tags():
471 return self.tags()[key]
471 return self.tags()[key]
472 if key in self.branchtags():
472 if key in self.branchtags():
473 return self.branchtags()[key]
473 return self.branchtags()[key]
474 n = self.changelog._partialmatch(key)
474 n = self.changelog._partialmatch(key)
475 if n:
475 if n:
476 return n
476 return n
477 try:
477 try:
478 if len(key) == 20:
478 if len(key) == 20:
479 key = hex(key)
479 key = hex(key)
480 except:
480 except:
481 pass
481 pass
482 raise error.RepoError(_("unknown revision '%s'") % key)
482 raise error.RepoError(_("unknown revision '%s'") % key)
483
483
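lookup() above resolves, in order: integer revision numbers, the symbolic names '.', 'null' and 'tip', exact changelog matches, tag names, branch names, and finally unambiguous hex prefixes, aborting with "unknown revision" otherwise. A brief usage sketch, assuming an existing non-empty repository at '.':

from mercurial import ui, hg
from mercurial.node import nullid, short

repo = hg.repository(ui.ui(), '.')
print short(repo.lookup('tip'))          # abbreviated tip node
print repo.lookup('null') == nullid      # True
print short(repo.lookup(0))              # node of revision 0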
484 def local(self):
484 def local(self):
485 return True
485 return True
486
486
487 def join(self, f):
487 def join(self, f):
488 return os.path.join(self.path, f)
488 return os.path.join(self.path, f)
489
489
490 def wjoin(self, f):
490 def wjoin(self, f):
491 return os.path.join(self.root, f)
491 return os.path.join(self.root, f)
492
492
493 def rjoin(self, f):
493 def rjoin(self, f):
494 return os.path.join(self.root, util.pconvert(f))
494 return os.path.join(self.root, util.pconvert(f))
495
495
496 def file(self, f):
496 def file(self, f):
497 if f[0] == '/':
497 if f[0] == '/':
498 f = f[1:]
498 f = f[1:]
499 return filelog.filelog(self.sopener, f)
499 return filelog.filelog(self.sopener, f)
500
500
501 def changectx(self, changeid):
501 def changectx(self, changeid):
502 return self[changeid]
502 return self[changeid]
503
503
504 def parents(self, changeid=None):
504 def parents(self, changeid=None):
505 '''get list of changectxs for parents of changeid'''
505 '''get list of changectxs for parents of changeid'''
506 return self[changeid].parents()
506 return self[changeid].parents()
507
507
508 def filectx(self, path, changeid=None, fileid=None):
508 def filectx(self, path, changeid=None, fileid=None):
509 """changeid can be a changeset revision, node, or tag.
509 """changeid can be a changeset revision, node, or tag.
510 fileid can be a file revision or node."""
510 fileid can be a file revision or node."""
511 return context.filectx(self, path, changeid, fileid)
511 return context.filectx(self, path, changeid, fileid)
512
512
513 def getcwd(self):
513 def getcwd(self):
514 return self.dirstate.getcwd()
514 return self.dirstate.getcwd()
515
515
516 def pathto(self, f, cwd=None):
516 def pathto(self, f, cwd=None):
517 return self.dirstate.pathto(f, cwd)
517 return self.dirstate.pathto(f, cwd)
518
518
519 def wfile(self, f, mode='r'):
519 def wfile(self, f, mode='r'):
520 return self.wopener(f, mode)
520 return self.wopener(f, mode)
521
521
522 def _link(self, f):
522 def _link(self, f):
523 return os.path.islink(self.wjoin(f))
523 return os.path.islink(self.wjoin(f))
524
524
525 def _filter(self, filter, filename, data):
525 def _filter(self, filter, filename, data):
526 if filter not in self.filterpats:
526 if filter not in self.filterpats:
527 l = []
527 l = []
528 for pat, cmd in self.ui.configitems(filter):
528 for pat, cmd in self.ui.configitems(filter):
529 if cmd == '!':
529 if cmd == '!':
530 continue
530 continue
531 mf = util.matcher(self.root, "", [pat], [], [])[1]
531 mf = util.matcher(self.root, "", [pat], [], [])[1]
532 fn = None
532 fn = None
533 params = cmd
533 params = cmd
534 for name, filterfn in self._datafilters.iteritems():
534 for name, filterfn in self._datafilters.iteritems():
535 if cmd.startswith(name):
535 if cmd.startswith(name):
536 fn = filterfn
536 fn = filterfn
537 params = cmd[len(name):].lstrip()
537 params = cmd[len(name):].lstrip()
538 break
538 break
539 if not fn:
539 if not fn:
540 fn = lambda s, c, **kwargs: util.filter(s, c)
540 fn = lambda s, c, **kwargs: util.filter(s, c)
541 # Wrap old filters not supporting keyword arguments
541 # Wrap old filters not supporting keyword arguments
542 if not inspect.getargspec(fn)[2]:
542 if not inspect.getargspec(fn)[2]:
543 oldfn = fn
543 oldfn = fn
544 fn = lambda s, c, **kwargs: oldfn(s, c)
544 fn = lambda s, c, **kwargs: oldfn(s, c)
545 l.append((mf, fn, params))
545 l.append((mf, fn, params))
546 self.filterpats[filter] = l
546 self.filterpats[filter] = l
547
547
548 for mf, fn, cmd in self.filterpats[filter]:
548 for mf, fn, cmd in self.filterpats[filter]:
549 if mf(filename):
549 if mf(filename):
550 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
550 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
551 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
551 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
552 break
552 break
553
553
554 return data
554 return data
555
555
556 def adddatafilter(self, name, filter):
556 def adddatafilter(self, name, filter):
557 self._datafilters[name] = filter
557 self._datafilters[name] = filter
558
558
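_filter() above maps a filename through the [encode]/[decode] patterns in the configuration, dispatching to a registered data filter when the configured command starts with that filter's name and otherwise treating the command as an external pipe. A standalone sketch of just the prefix dispatch, with an invented 'upper:' filter:

def upper_filter(s, params, **kwargs):
    return s.upper()

datafilters = {'upper:': upper_filter}

def pick_filter(cmd):
    for name, filterfn in datafilters.items():
        if cmd.startswith(name):
            return filterfn, cmd[len(name):].lstrip()
    # fallback: the real code pipes the data through cmd as a shell command
    return (lambda s, c, **kwargs: s), cmd

fn, params = pick_filter('upper: ignored-params')
print fn('data', params) == 'DATA'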
559 def wread(self, filename):
559 def wread(self, filename):
560 if self._link(filename):
560 if self._link(filename):
561 data = os.readlink(self.wjoin(filename))
561 data = os.readlink(self.wjoin(filename))
562 else:
562 else:
563 data = self.wopener(filename, 'r').read()
563 data = self.wopener(filename, 'r').read()
564 return self._filter("encode", filename, data)
564 return self._filter("encode", filename, data)
565
565
566 def wwrite(self, filename, data, flags):
566 def wwrite(self, filename, data, flags):
567 data = self._filter("decode", filename, data)
567 data = self._filter("decode", filename, data)
568 try:
568 try:
569 os.unlink(self.wjoin(filename))
569 os.unlink(self.wjoin(filename))
570 except OSError:
570 except OSError:
571 pass
571 pass
572 if 'l' in flags:
572 if 'l' in flags:
573 self.wopener.symlink(data, filename)
573 self.wopener.symlink(data, filename)
574 else:
574 else:
575 self.wopener(filename, 'w').write(data)
575 self.wopener(filename, 'w').write(data)
576 if 'x' in flags:
576 if 'x' in flags:
577 util.set_flags(self.wjoin(filename), False, True)
577 util.set_flags(self.wjoin(filename), False, True)
578
578
579 def wwritedata(self, filename, data):
579 def wwritedata(self, filename, data):
580 return self._filter("decode", filename, data)
580 return self._filter("decode", filename, data)
581
581
582 def transaction(self):
582 def transaction(self):
583 tr = self._transref and self._transref() or None
583 tr = self._transref and self._transref() or None
584 if tr and tr.running():
584 if tr and tr.running():
585 return tr.nest()
585 return tr.nest()
586
586
587 # abort here if the journal already exists
587 # abort here if the journal already exists
588 if os.path.exists(self.sjoin("journal")):
588 if os.path.exists(self.sjoin("journal")):
589 raise error.RepoError(_("journal already exists - run hg recover"))
589 raise error.RepoError(_("journal already exists - run hg recover"))
590
590
591 # save dirstate for rollback
591 # save dirstate for rollback
592 try:
592 try:
593 ds = self.opener("dirstate").read()
593 ds = self.opener("dirstate").read()
594 except IOError:
594 except IOError:
595 ds = ""
595 ds = ""
596 self.opener("journal.dirstate", "w").write(ds)
596 self.opener("journal.dirstate", "w").write(ds)
597 self.opener("journal.branch", "w").write(self.dirstate.branch())
597 self.opener("journal.branch", "w").write(self.dirstate.branch())
598
598
599 renames = [(self.sjoin("journal"), self.sjoin("undo")),
599 renames = [(self.sjoin("journal"), self.sjoin("undo")),
600 (self.join("journal.dirstate"), self.join("undo.dirstate")),
600 (self.join("journal.dirstate"), self.join("undo.dirstate")),
601 (self.join("journal.branch"), self.join("undo.branch"))]
601 (self.join("journal.branch"), self.join("undo.branch"))]
602 tr = transaction.transaction(self.ui.warn, self.sopener,
602 tr = transaction.transaction(self.ui.warn, self.sopener,
603 self.sjoin("journal"),
603 self.sjoin("journal"),
604 aftertrans(renames),
604 aftertrans(renames),
605 self.store.createmode)
605 self.store.createmode)
606 self._transref = weakref.ref(tr)
606 self._transref = weakref.ref(tr)
607 return tr
607 return tr
608
608
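transaction() above journals the dirstate and the current branch and registers renames so that a closed journal becomes the undo.* files that rollback() later restores, while a leftover journal is what recover() replays. A hedged usage sketch of the open/close pattern, assuming a repository at '.' and this release's convention that dropping an unclosed transaction aborts it:

from mercurial import ui, hg

repo = hg.repository(ui.ui(), '.')
lock = repo.lock()
try:
    tr = repo.transaction()
    try:
        # journalled writes through repo.sopener would go here
        tr.close()             # success: journal.* files become undo.*
    finally:
        del tr                 # dropping an unclosed transaction aborts it
finally:
    lock.release()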
609 def recover(self):
609 def recover(self):
610 lock = self.lock()
610 lock = self.lock()
611 try:
611 try:
612 if os.path.exists(self.sjoin("journal")):
612 if os.path.exists(self.sjoin("journal")):
613 self.ui.status(_("rolling back interrupted transaction\n"))
613 self.ui.status(_("rolling back interrupted transaction\n"))
614 transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
614 transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
615 self.invalidate()
615 self.invalidate()
616 return True
616 return True
617 else:
617 else:
618 self.ui.warn(_("no interrupted transaction available\n"))
618 self.ui.warn(_("no interrupted transaction available\n"))
619 return False
619 return False
620 finally:
620 finally:
621 lock.release()
621 lock.release()
622
622
623 def rollback(self):
623 def rollback(self):
624 wlock = lock = None
624 wlock = lock = None
625 try:
625 try:
626 wlock = self.wlock()
626 wlock = self.wlock()
627 lock = self.lock()
627 lock = self.lock()
628 if os.path.exists(self.sjoin("undo")):
628 if os.path.exists(self.sjoin("undo")):
629 self.ui.status(_("rolling back last transaction\n"))
629 self.ui.status(_("rolling back last transaction\n"))
630 transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
630 transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
631 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
631 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
632 try:
632 try:
633 branch = self.opener("undo.branch").read()
633 branch = self.opener("undo.branch").read()
634 self.dirstate.setbranch(branch)
634 self.dirstate.setbranch(branch)
635 except IOError:
635 except IOError:
636 self.ui.warn(_("Named branch could not be reset, "
636 self.ui.warn(_("Named branch could not be reset, "
637 "current branch still is: %s\n")
637 "current branch still is: %s\n")
638 % encoding.tolocal(self.dirstate.branch()))
638 % encoding.tolocal(self.dirstate.branch()))
639 self.invalidate()
639 self.invalidate()
640 self.dirstate.invalidate()
640 self.dirstate.invalidate()
641 else:
641 else:
642 self.ui.warn(_("no rollback information available\n"))
642 self.ui.warn(_("no rollback information available\n"))
643 finally:
643 finally:
644 release(lock, wlock)
644 release(lock, wlock)
645
645
646 def invalidate(self):
646 def invalidate(self):
647 for a in "changelog manifest".split():
647 for a in "changelog manifest".split():
648 if a in self.__dict__:
648 if a in self.__dict__:
649 delattr(self, a)
649 delattr(self, a)
650 self.tagscache = None
650 self.tagscache = None
651 self._tagstypecache = None
651 self._tagstypecache = None
652 self.nodetagscache = None
652 self.nodetagscache = None
653 self.branchcache = None
653 self.branchcache = None
654 self._ubranchcache = None
654 self._ubranchcache = None
655 self._branchcachetip = None
655 self._branchcachetip = None
656
656
657 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
657 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
658 try:
658 try:
659 l = lock.lock(lockname, 0, releasefn, desc=desc)
659 l = lock.lock(lockname, 0, releasefn, desc=desc)
660 except error.LockHeld, inst:
660 except error.LockHeld, inst:
661 if not wait:
661 if not wait:
662 raise
662 raise
663 self.ui.warn(_("waiting for lock on %s held by %r\n") %
663 self.ui.warn(_("waiting for lock on %s held by %r\n") %
664 (desc, inst.locker))
664 (desc, inst.locker))
665 # default to 600 seconds timeout
665 # default to 600 seconds timeout
666 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
666 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
667 releasefn, desc=desc)
667 releasefn, desc=desc)
668 if acquirefn:
668 if acquirefn:
669 acquirefn()
669 acquirefn()
670 return l
670 return l
671
671
672 def lock(self, wait=True):
672 def lock(self, wait=True):
673 l = self._lockref and self._lockref()
673 l = self._lockref and self._lockref()
674 if l is not None and l.held:
674 if l is not None and l.held:
675 l.lock()
675 l.lock()
676 return l
676 return l
677
677
678 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
678 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
679 _('repository %s') % self.origroot)
679 _('repository %s') % self.origroot)
680 self._lockref = weakref.ref(l)
680 self._lockref = weakref.ref(l)
681 return l
681 return l
682
682
683 def wlock(self, wait=True):
683 def wlock(self, wait=True):
684 l = self._wlockref and self._wlockref()
684 l = self._wlockref and self._wlockref()
685 if l is not None and l.held:
685 if l is not None and l.held:
686 l.lock()
686 l.lock()
687 return l
687 return l
688
688
689 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
689 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
690 self.dirstate.invalidate, _('working directory of %s') %
690 self.dirstate.invalidate, _('working directory of %s') %
691 self.origroot)
691 self.origroot)
692 self._wlockref = weakref.ref(l)
692 self._wlockref = weakref.ref(l)
693 return l
693 return l
694
694
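The three methods above implement the repository's locking; callers take the working-directory lock before the store lock and release them in reverse, exactly as rollback() does. A minimal sketch of that discipline (illustrative only; repo is assumed to be a localrepository):

from mercurial.lock import release

wlock = lock = None
try:
    wlock = repo.wlock()    # working-directory lock first
    lock = repo.lock()      # then the store lock
    # ... modify the repository here ...
finally:
    release(lock, wlock)    # release() tolerates None and drops locks in order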
695 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
695 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
696 """
696 """
697 commit an individual file as part of a larger transaction
697 commit an individual file as part of a larger transaction
698 """
698 """
699
699
700 fname = fctx.path()
700 fname = fctx.path()
701 text = fctx.data()
701 text = fctx.data()
702 flog = self.file(fname)
702 flog = self.file(fname)
703 fparent1 = manifest1.get(fname, nullid)
703 fparent1 = manifest1.get(fname, nullid)
704 fparent2 = fparent2o = manifest2.get(fname, nullid)
704 fparent2 = fparent2o = manifest2.get(fname, nullid)
705
705
706 meta = {}
706 meta = {}
707 copy = fctx.renamed()
707 copy = fctx.renamed()
708 if copy and copy[0] != fname:
708 if copy and copy[0] != fname:
709 # Mark the new revision of this file as a copy of another
709 # Mark the new revision of this file as a copy of another
710 # file. This copy data will effectively act as a parent
710 # file. This copy data will effectively act as a parent
711 # of this new revision. If this is a merge, the first
711 # of this new revision. If this is a merge, the first
712 # parent will be the nullid (meaning "look up the copy data")
712 # parent will be the nullid (meaning "look up the copy data")
713 # and the second one will be the other parent. For example:
713 # and the second one will be the other parent. For example:
714 #
714 #
715 # 0 --- 1 --- 3 rev1 changes file foo
715 # 0 --- 1 --- 3 rev1 changes file foo
716 # \ / rev2 renames foo to bar and changes it
716 # \ / rev2 renames foo to bar and changes it
717 # \- 2 -/ rev3 should have bar with all changes and
717 # \- 2 -/ rev3 should have bar with all changes and
718 # should record that bar descends from
718 # should record that bar descends from
719 # bar in rev2 and foo in rev1
719 # bar in rev2 and foo in rev1
720 #
720 #
721 # this allows this merge to succeed:
721 # this allows this merge to succeed:
722 #
722 #
723 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
723 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
724 # \ / merging rev3 and rev4 should use bar@rev2
724 # \ / merging rev3 and rev4 should use bar@rev2
725 # \- 2 --- 4 as the merge base
725 # \- 2 --- 4 as the merge base
726 #
726 #
727
727
728 cfname = copy[0]
728 cfname = copy[0]
729 crev = manifest1.get(cfname)
729 crev = manifest1.get(cfname)
730 newfparent = fparent2
730 newfparent = fparent2
731
731
732 if manifest2: # branch merge
732 if manifest2: # branch merge
733 if fparent2 == nullid or crev is None: # copied on remote side
733 if fparent2 == nullid or crev is None: # copied on remote side
734 if cfname in manifest2:
734 if cfname in manifest2:
735 crev = manifest2[cfname]
735 crev = manifest2[cfname]
736 newfparent = fparent1
736 newfparent = fparent1
737
737
738 # find source in nearest ancestor if we've lost track
738 # find source in nearest ancestor if we've lost track
739 if not crev:
739 if not crev:
740 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
740 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
741 (fname, cfname))
741 (fname, cfname))
742 for ancestor in self['.'].ancestors():
742 for ancestor in self['.'].ancestors():
743 if cfname in ancestor:
743 if cfname in ancestor:
744 crev = ancestor[cfname].filenode()
744 crev = ancestor[cfname].filenode()
745 break
745 break
746
746
747 self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
747 self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
748 meta["copy"] = cfname
748 meta["copy"] = cfname
749 meta["copyrev"] = hex(crev)
749 meta["copyrev"] = hex(crev)
750 fparent1, fparent2 = nullid, newfparent
750 fparent1, fparent2 = nullid, newfparent
751 elif fparent2 != nullid:
751 elif fparent2 != nullid:
752 # is one parent an ancestor of the other?
752 # is one parent an ancestor of the other?
753 fparentancestor = flog.ancestor(fparent1, fparent2)
753 fparentancestor = flog.ancestor(fparent1, fparent2)
754 if fparentancestor == fparent1:
754 if fparentancestor == fparent1:
755 fparent1, fparent2 = fparent2, nullid
755 fparent1, fparent2 = fparent2, nullid
756 elif fparentancestor == fparent2:
756 elif fparentancestor == fparent2:
757 fparent2 = nullid
757 fparent2 = nullid
758
758
759 # is the file changed?
759 # is the file changed?
760 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
760 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
761 changelist.append(fname)
761 changelist.append(fname)
762 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
762 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
763
763
764 # are just the flags changed during merge?
764 # are just the flags changed during merge?
765 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
765 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
766 changelist.append(fname)
766 changelist.append(fname)
767
767
768 return fparent1
768 return fparent1
769
769
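For a renamed file, the method above records the copy source in the filelog metadata and shifts the real parent into the second slot, so a nullid first parent means "consult the copy data". A hypothetical illustration of what ends up stored for a file renamed from foo to bar (the hex filenode below is made up):

meta = {
    'copy': 'foo',                                          # source path
    'copyrev': '5d0cb4f32a1e9b87c6d5e4f3a2b1c0d9e8f7a6b5',  # filenode of foo (made up)
}
# The filelog parents become (nullid, <other parent or nullid>); the nullid
# first parent tells readers to look at the copy metadata instead.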
770 def commit(self, files=None, text="", user=None, date=None, match=None,
770 def commit(self, files=None, text="", user=None, date=None, match=None,
771 force=False, force_editor=False, extra={}, empty_ok=False):
771 force=False, editor=False, extra={}):
772 wlock = lock = None
772 wlock = lock = None
773 if extra.get("close"):
773 if extra.get("close"):
774 force = True
774 force = True
775 if files:
775 if files:
776 files = list(set(files))
776 files = list(set(files))
777
777
778 wlock = self.wlock()
778 wlock = self.wlock()
779 try:
779 try:
780 p1, p2 = self.dirstate.parents()
780 p1, p2 = self.dirstate.parents()
781
781
782 if (not force and p2 != nullid and
782 if (not force and p2 != nullid and
783 (match and (match.files() or match.anypats()))):
783 (match and (match.files() or match.anypats()))):
784 raise util.Abort(_('cannot partially commit a merge '
784 raise util.Abort(_('cannot partially commit a merge '
785 '(do not specify files or patterns)'))
785 '(do not specify files or patterns)'))
786
786
787 if files:
787 if files:
788 modified, removed = [], []
788 modified, removed = [], []
789 for f in files:
789 for f in files:
790 s = self.dirstate[f]
790 s = self.dirstate[f]
791 if s in 'nma':
791 if s in 'nma':
792 modified.append(f)
792 modified.append(f)
793 elif s == 'r':
793 elif s == 'r':
794 removed.append(f)
794 removed.append(f)
795 else:
795 else:
796 self.ui.warn(_("%s not tracked!\n") % f)
796 self.ui.warn(_("%s not tracked!\n") % f)
797 changes = [modified, [], removed, [], []]
797 changes = [modified, [], removed, [], []]
798 else:
798 else:
799 changes = self.status(match=match)
799 changes = self.status(match=match)
800
800
801 if (not (changes[0] or changes[1] or changes[2])
801 if (not (changes[0] or changes[1] or changes[2])
802 and not force and p2 == nullid and
802 and not force and p2 == nullid and
803 self[None].branch() == self['.'].branch()):
803 self[None].branch() == self['.'].branch()):
804 self.ui.status(_("nothing changed\n"))
804 self.ui.status(_("nothing changed\n"))
805 return None
805 return None
806
806
807 ms = merge_.mergestate(self)
807 ms = merge_.mergestate(self)
808 for f in changes[0]:
808 for f in changes[0]:
809 if f in ms and ms[f] == 'u':
809 if f in ms and ms[f] == 'u':
810 raise util.Abort(_("unresolved merge conflicts "
810 raise util.Abort(_("unresolved merge conflicts "
811 "(see hg resolve)"))
811 "(see hg resolve)"))
812 wctx = context.workingctx(self, (p1, p2), text, user, date,
812 wctx = context.workingctx(self, (p1, p2), text, user, date,
813 extra, changes)
813 extra, changes)
814 r = self._commitctx(wctx, force, force_editor, empty_ok, True)
814 r = self._commitctx(wctx, force, editor, True)
815 ms.reset()
815 ms.reset()
816 return r
816 return r
817
817
818 finally:
818 finally:
819 wlock.release()
819 wlock.release()
820
820
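A minimal, illustrative call of the commit() signature shown above (repo is assumed to be an existing localrepository; the message, user and date are placeholders):

node = repo.commit(text='fix frobnication',
                   user='Jane Doe <jane@example.com>',
                   date='2009-05-01 12:00:00')
if node is None:
    repo.ui.status('nothing changed\n')   # commit() returns None when there is nothing to do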
821 def commitctx(self, ctx):
821 def commitctx(self, ctx):
822 """Add a new revision to current repository.
822 """Add a new revision to current repository.
823
823
824 Revision information is passed in the context.memctx argument.
824 Revision information is passed in the context.memctx argument.
825 commitctx() does not touch the working directory.
825 commitctx() does not touch the working directory.
826 """
826 """
827 return self._commitctx(ctx, force=True, force_editor=False,
827 return self._commitctx(ctx, force=True, editor=None, working=False)
828 empty_ok=True, working=False)
829
828
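A sketch of committing entirely in memory through commitctx(), assuming the memctx/memfilectx helpers in mercurial.context keep the constructor signatures they had around this release (all names and contents below are placeholders):

from mercurial import context
from mercurial.node import nullid

def getfilectx(repo, memctx, path):
    # supply the contents for every path listed in the files argument
    return context.memfilectx(path, 'hello from %s\n' % path, False, False, None)

ctx = context.memctx(repo, (repo['.'].node(), nullid),
                     'illustrative in-memory commit',
                     ['hello.txt'], getfilectx,
                     user='someone@example.com')
node = repo.commitctx(ctx)   # creates the changeset without touching the working dir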
830 def _commitctx(self, ctx, force=False, force_editor=False, empty_ok=False,
829 def _commitctx(self, ctx, force=False, editor=None, working=True):
831 working=True):
832 lock = self.lock()
830 lock = self.lock()
833 tr = None
831 tr = None
834 valid = 0 # don't save the dirstate if this isn't set
832 valid = 0 # don't save the dirstate if this isn't set
835 try:
833 try:
836 commit = sorted(ctx.modified() + ctx.added())
834 commit = sorted(ctx.modified() + ctx.added())
837 remove = ctx.removed()
835 remove = ctx.removed()
838 extra = ctx.extra().copy()
836 extra = ctx.extra().copy()
839 branchname = extra['branch']
837 branchname = extra['branch']
840 user = ctx.user()
838 user = ctx.user()
841 text = ctx.description()
839 text = ctx.description()
842
840
843 p1, p2 = [p.node() for p in ctx.parents()]
841 p1, p2 = [p.node() for p in ctx.parents()]
844 c1 = self.changelog.read(p1)
842 c1 = self.changelog.read(p1)
845 c2 = self.changelog.read(p2)
843 c2 = self.changelog.read(p2)
846 m1 = self.manifest.read(c1[0]).copy()
844 m1 = self.manifest.read(c1[0]).copy()
847 m2 = self.manifest.read(c2[0])
845 m2 = self.manifest.read(c2[0])
848
846
849 xp1 = hex(p1)
847 xp1 = hex(p1)
850 if p2 == nullid: xp2 = ''
848 if p2 == nullid: xp2 = ''
851 else: xp2 = hex(p2)
849 else: xp2 = hex(p2)
852
850
853 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
851 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
854
852
855 tr = self.transaction()
853 tr = self.transaction()
856 trp = weakref.proxy(tr)
854 trp = weakref.proxy(tr)
857
855
858 # check in files
856 # check in files
859 new = {}
857 new = {}
860 changed = []
858 changed = []
861 linkrev = len(self)
859 linkrev = len(self)
862 for f in commit:
860 for f in commit:
863 self.ui.note(f + "\n")
861 self.ui.note(f + "\n")
864 try:
862 try:
865 fctx = ctx[f]
863 fctx = ctx[f]
866 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
864 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
867 changed)
865 changed)
868 m1.set(f, fctx.flags())
866 m1.set(f, fctx.flags())
869 if working:
867 if working:
870 self.dirstate.normal(f)
868 self.dirstate.normal(f)
871
869
872 except (OSError, IOError):
870 except (OSError, IOError):
873 if working:
871 if working:
874 self.ui.warn(_("trouble committing %s!\n") % f)
872 self.ui.warn(_("trouble committing %s!\n") % f)
875 raise
873 raise
876 else:
874 else:
877 remove.append(f)
875 remove.append(f)
878
876
879 updated, added = [], []
877 updated, added = [], []
880 for f in sorted(changed):
878 for f in sorted(changed):
881 if f in m1 or f in m2:
879 if f in m1 or f in m2:
882 updated.append(f)
880 updated.append(f)
883 else:
881 else:
884 added.append(f)
882 added.append(f)
885
883
886 # update manifest
884 # update manifest
887 m1.update(new)
885 m1.update(new)
888 removed = [f for f in sorted(remove) if f in m1 or f in m2]
886 removed = [f for f in sorted(remove) if f in m1 or f in m2]
889 removed1 = []
887 removed1 = []
890
888
891 for f in removed:
889 for f in removed:
892 if f in m1:
890 if f in m1:
893 del m1[f]
891 del m1[f]
894 removed1.append(f)
892 removed1.append(f)
895 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
893 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
896 (new, removed1))
894 (new, removed1))
897
895
898 # add changeset
896 if editor:
899 if (not empty_ok and not text) or force_editor:
897 text = editor(self, ctx, added, updated, removed)
900 edittext = []
901 if text:
902 edittext.append(text)
903 edittext.append("")
904 edittext.append("") # Empty line between message and comments.
905 edittext.append(_("HG: Enter commit message."
906 " Lines beginning with 'HG:' are removed."))
907 edittext.append("HG: --")
908 edittext.append(_("HG: user: %s") % user)
909 if p2 != nullid:
910 edittext.append(_("HG: branch merge"))
911 if branchname:
912 edittext.append(_("HG: branch '%s'")
913 % encoding.tolocal(branchname))
914 edittext.extend([_("HG: added %s") % f for f in added])
915 edittext.extend([_("HG: changed %s") % f for f in updated])
916 edittext.extend([_("HG: removed %s") % f for f in removed])
917 if not added and not updated and not removed:
918 edittext.append(_("HG: no files changed"))
919 edittext.append("")
920 # run editor in the repository root
921 olddir = os.getcwd()
922 os.chdir(self.root)
923 text = self.ui.edit("\n".join(edittext), user)
924 os.chdir(olddir)
925
898
926 lines = [line.rstrip() for line in text.rstrip().splitlines()]
899 lines = [line.rstrip() for line in text.rstrip().splitlines()]
927 while lines and not lines[0]:
900 while lines and not lines[0]:
928 del lines[0]
901 del lines[0]
929 if not lines and working:
930 raise util.Abort(_("empty commit message"))
931 text = '\n'.join(lines)
902 text = '\n'.join(lines)
932
903
933 self.changelog.delayupdate()
904 self.changelog.delayupdate()
934 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
905 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
935 user, ctx.date(), extra)
906 user, ctx.date(), extra)
936 p = lambda: self.changelog.writepending() and self.root or ""
907 p = lambda: self.changelog.writepending() and self.root or ""
937 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
908 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
938 parent2=xp2, pending=p)
909 parent2=xp2, pending=p)
939 self.changelog.finalize(trp)
910 self.changelog.finalize(trp)
940 tr.close()
911 tr.close()
941
912
942 if self.branchcache:
913 if self.branchcache:
943 self.branchtags()
914 self.branchtags()
944
915
945 if working:
916 if working:
946 self.dirstate.setparents(n)
917 self.dirstate.setparents(n)
947 for f in removed:
918 for f in removed:
948 self.dirstate.forget(f)
919 self.dirstate.forget(f)
949 valid = 1 # our dirstate updates are complete
920 valid = 1 # our dirstate updates are complete
950
921
951 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
922 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
952 return n
923 return n
953 finally:
924 finally:
954 if not valid: # don't save our updated dirstate
925 if not valid: # don't save our updated dirstate
955 self.dirstate.invalidate()
926 self.dirstate.invalidate()
956 del tr
927 del tr
957 lock.release()
928 lock.release()
958
929
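This is the change the commit message refers to: the interactive message editor now lives in cmdutil and is handed in as a callable, which _commitctx invokes as editor(repo, ctx, added, updated, removed) and whose return value becomes the commit message. A hypothetical non-interactive editor could look like this:

def quieteditor(repo, ctx, added, updated, removed):
    # build a message automatically instead of launching $EDITOR
    summary = ctx.description() or 'automated commit'
    files = sorted(added + updated + removed)
    return '%s\n\nfiles touched: %s\n' % (summary, ', '.join(files))

# hypothetical use: repo.commit(text='', editor=quieteditor)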
959 def walk(self, match, node=None):
930 def walk(self, match, node=None):
960 '''
931 '''
961 walk recursively through the directory tree or a given
932 walk recursively through the directory tree or a given
962 changeset, finding all files matched by the match
933 changeset, finding all files matched by the match
963 function
934 function
964 '''
935 '''
965 return self[node].walk(match)
936 return self[node].walk(match)
966
937
967 def status(self, node1='.', node2=None, match=None,
938 def status(self, node1='.', node2=None, match=None,
968 ignored=False, clean=False, unknown=False):
939 ignored=False, clean=False, unknown=False):
969 """return status of files between two nodes or node and working directory
940 """return status of files between two nodes or node and working directory
970
941
971 If node1 is None, use the first dirstate parent instead.
942 If node1 is None, use the first dirstate parent instead.
972 If node2 is None, compare node1 with working directory.
943 If node2 is None, compare node1 with working directory.
973 """
944 """
974
945
975 def mfmatches(ctx):
946 def mfmatches(ctx):
976 mf = ctx.manifest().copy()
947 mf = ctx.manifest().copy()
977 for fn in mf.keys():
948 for fn in mf.keys():
978 if not match(fn):
949 if not match(fn):
979 del mf[fn]
950 del mf[fn]
980 return mf
951 return mf
981
952
982 if isinstance(node1, context.changectx):
953 if isinstance(node1, context.changectx):
983 ctx1 = node1
954 ctx1 = node1
984 else:
955 else:
985 ctx1 = self[node1]
956 ctx1 = self[node1]
986 if isinstance(node2, context.changectx):
957 if isinstance(node2, context.changectx):
987 ctx2 = node2
958 ctx2 = node2
988 else:
959 else:
989 ctx2 = self[node2]
960 ctx2 = self[node2]
990
961
991 working = ctx2.rev() is None
962 working = ctx2.rev() is None
992 parentworking = working and ctx1 == self['.']
963 parentworking = working and ctx1 == self['.']
993 match = match or match_.always(self.root, self.getcwd())
964 match = match or match_.always(self.root, self.getcwd())
994 listignored, listclean, listunknown = ignored, clean, unknown
965 listignored, listclean, listunknown = ignored, clean, unknown
995
966
996 # load earliest manifest first for caching reasons
967 # load earliest manifest first for caching reasons
997 if not working and ctx2.rev() < ctx1.rev():
968 if not working and ctx2.rev() < ctx1.rev():
998 ctx2.manifest()
969 ctx2.manifest()
999
970
1000 if not parentworking:
971 if not parentworking:
1001 def bad(f, msg):
972 def bad(f, msg):
1002 if f not in ctx1:
973 if f not in ctx1:
1003 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
974 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1004 return False
975 return False
1005 match.bad = bad
976 match.bad = bad
1006
977
1007 if working: # we need to scan the working dir
978 if working: # we need to scan the working dir
1008 s = self.dirstate.status(match, listignored, listclean, listunknown)
979 s = self.dirstate.status(match, listignored, listclean, listunknown)
1009 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
980 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1010
981
1011 # check for any possibly clean files
982 # check for any possibly clean files
1012 if parentworking and cmp:
983 if parentworking and cmp:
1013 fixup = []
984 fixup = []
1014 # do a full compare of any files that might have changed
985 # do a full compare of any files that might have changed
1015 for f in sorted(cmp):
986 for f in sorted(cmp):
1016 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
987 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1017 or ctx1[f].cmp(ctx2[f].data())):
988 or ctx1[f].cmp(ctx2[f].data())):
1018 modified.append(f)
989 modified.append(f)
1019 else:
990 else:
1020 fixup.append(f)
991 fixup.append(f)
1021
992
1022 if listclean:
993 if listclean:
1023 clean += fixup
994 clean += fixup
1024
995
1025 # update dirstate for files that are actually clean
996 # update dirstate for files that are actually clean
1026 if fixup:
997 if fixup:
1027 wlock = None
998 wlock = None
1028 try:
999 try:
1029 try:
1000 try:
1030 # updating the dirstate is optional
1001 # updating the dirstate is optional
1031 # so we don't wait on the lock
1002 # so we don't wait on the lock
1032 wlock = self.wlock(False)
1003 wlock = self.wlock(False)
1033 for f in fixup:
1004 for f in fixup:
1034 self.dirstate.normal(f)
1005 self.dirstate.normal(f)
1035 except error.LockError:
1006 except error.LockError:
1036 pass
1007 pass
1037 finally:
1008 finally:
1038 release(wlock)
1009 release(wlock)
1039
1010
1040 if not parentworking:
1011 if not parentworking:
1041 mf1 = mfmatches(ctx1)
1012 mf1 = mfmatches(ctx1)
1042 if working:
1013 if working:
1043 # we are comparing working dir against non-parent
1014 # we are comparing working dir against non-parent
1044 # generate a pseudo-manifest for the working dir
1015 # generate a pseudo-manifest for the working dir
1045 mf2 = mfmatches(self['.'])
1016 mf2 = mfmatches(self['.'])
1046 for f in cmp + modified + added:
1017 for f in cmp + modified + added:
1047 mf2[f] = None
1018 mf2[f] = None
1048 mf2.set(f, ctx2.flags(f))
1019 mf2.set(f, ctx2.flags(f))
1049 for f in removed:
1020 for f in removed:
1050 if f in mf2:
1021 if f in mf2:
1051 del mf2[f]
1022 del mf2[f]
1052 else:
1023 else:
1053 # we are comparing two revisions
1024 # we are comparing two revisions
1054 deleted, unknown, ignored = [], [], []
1025 deleted, unknown, ignored = [], [], []
1055 mf2 = mfmatches(ctx2)
1026 mf2 = mfmatches(ctx2)
1056
1027
1057 modified, added, clean = [], [], []
1028 modified, added, clean = [], [], []
1058 for fn in mf2:
1029 for fn in mf2:
1059 if fn in mf1:
1030 if fn in mf1:
1060 if (mf1.flags(fn) != mf2.flags(fn) or
1031 if (mf1.flags(fn) != mf2.flags(fn) or
1061 (mf1[fn] != mf2[fn] and
1032 (mf1[fn] != mf2[fn] and
1062 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1033 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1063 modified.append(fn)
1034 modified.append(fn)
1064 elif listclean:
1035 elif listclean:
1065 clean.append(fn)
1036 clean.append(fn)
1066 del mf1[fn]
1037 del mf1[fn]
1067 else:
1038 else:
1068 added.append(fn)
1039 added.append(fn)
1069 removed = mf1.keys()
1040 removed = mf1.keys()
1070
1041
1071 r = modified, added, removed, deleted, unknown, ignored, clean
1042 r = modified, added, removed, deleted, unknown, ignored, clean
1072 [l.sort() for l in r]
1043 [l.sort() for l in r]
1073 return r
1044 return r
1074
1045
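status() always returns the same seven sorted lists, whichever pair of revisions is compared. A short illustrative use (repo is assumed to exist):

# working directory against its first parent, including clean files
modified, added, removed, deleted, unknown, ignored, clean = \
    repo.status(clean=True)

# two stored revisions; deleted/unknown/ignored come back empty in this case
changes = repo.status(node1=0, node2='tip')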
1075 def add(self, list):
1046 def add(self, list):
1076 wlock = self.wlock()
1047 wlock = self.wlock()
1077 try:
1048 try:
1078 rejected = []
1049 rejected = []
1079 for f in list:
1050 for f in list:
1080 p = self.wjoin(f)
1051 p = self.wjoin(f)
1081 try:
1052 try:
1082 st = os.lstat(p)
1053 st = os.lstat(p)
1083 except:
1054 except:
1084 self.ui.warn(_("%s does not exist!\n") % f)
1055 self.ui.warn(_("%s does not exist!\n") % f)
1085 rejected.append(f)
1056 rejected.append(f)
1086 continue
1057 continue
1087 if st.st_size > 10000000:
1058 if st.st_size > 10000000:
1088 self.ui.warn(_("%s: files over 10MB may cause memory and"
1059 self.ui.warn(_("%s: files over 10MB may cause memory and"
1089 " performance problems\n"
1060 " performance problems\n"
1090 "(use 'hg revert %s' to unadd the file)\n")
1061 "(use 'hg revert %s' to unadd the file)\n")
1091 % (f, f))
1062 % (f, f))
1092 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1063 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1093 self.ui.warn(_("%s not added: only files and symlinks "
1064 self.ui.warn(_("%s not added: only files and symlinks "
1094 "supported currently\n") % f)
1065 "supported currently\n") % f)
1095 rejected.append(p)
1066 rejected.append(p)
1096 elif self.dirstate[f] in 'amn':
1067 elif self.dirstate[f] in 'amn':
1097 self.ui.warn(_("%s already tracked!\n") % f)
1068 self.ui.warn(_("%s already tracked!\n") % f)
1098 elif self.dirstate[f] == 'r':
1069 elif self.dirstate[f] == 'r':
1099 self.dirstate.normallookup(f)
1070 self.dirstate.normallookup(f)
1100 else:
1071 else:
1101 self.dirstate.add(f)
1072 self.dirstate.add(f)
1102 return rejected
1073 return rejected
1103 finally:
1074 finally:
1104 wlock.release()
1075 wlock.release()
1105
1076
1106 def forget(self, list):
1077 def forget(self, list):
1107 wlock = self.wlock()
1078 wlock = self.wlock()
1108 try:
1079 try:
1109 for f in list:
1080 for f in list:
1110 if self.dirstate[f] != 'a':
1081 if self.dirstate[f] != 'a':
1111 self.ui.warn(_("%s not added!\n") % f)
1082 self.ui.warn(_("%s not added!\n") % f)
1112 else:
1083 else:
1113 self.dirstate.forget(f)
1084 self.dirstate.forget(f)
1114 finally:
1085 finally:
1115 wlock.release()
1086 wlock.release()
1116
1087
1117 def remove(self, list, unlink=False):
1088 def remove(self, list, unlink=False):
1118 wlock = None
1089 wlock = None
1119 try:
1090 try:
1120 if unlink:
1091 if unlink:
1121 for f in list:
1092 for f in list:
1122 try:
1093 try:
1123 util.unlink(self.wjoin(f))
1094 util.unlink(self.wjoin(f))
1124 except OSError, inst:
1095 except OSError, inst:
1125 if inst.errno != errno.ENOENT:
1096 if inst.errno != errno.ENOENT:
1126 raise
1097 raise
1127 wlock = self.wlock()
1098 wlock = self.wlock()
1128 for f in list:
1099 for f in list:
1129 if unlink and os.path.exists(self.wjoin(f)):
1100 if unlink and os.path.exists(self.wjoin(f)):
1130 self.ui.warn(_("%s still exists!\n") % f)
1101 self.ui.warn(_("%s still exists!\n") % f)
1131 elif self.dirstate[f] == 'a':
1102 elif self.dirstate[f] == 'a':
1132 self.dirstate.forget(f)
1103 self.dirstate.forget(f)
1133 elif f not in self.dirstate:
1104 elif f not in self.dirstate:
1134 self.ui.warn(_("%s not tracked!\n") % f)
1105 self.ui.warn(_("%s not tracked!\n") % f)
1135 else:
1106 else:
1136 self.dirstate.remove(f)
1107 self.dirstate.remove(f)
1137 finally:
1108 finally:
1138 release(wlock)
1109 release(wlock)
1139
1110
1140 def undelete(self, list):
1111 def undelete(self, list):
1141 manifests = [self.manifest.read(self.changelog.read(p)[0])
1112 manifests = [self.manifest.read(self.changelog.read(p)[0])
1142 for p in self.dirstate.parents() if p != nullid]
1113 for p in self.dirstate.parents() if p != nullid]
1143 wlock = self.wlock()
1114 wlock = self.wlock()
1144 try:
1115 try:
1145 for f in list:
1116 for f in list:
1146 if self.dirstate[f] != 'r':
1117 if self.dirstate[f] != 'r':
1147 self.ui.warn(_("%s not removed!\n") % f)
1118 self.ui.warn(_("%s not removed!\n") % f)
1148 else:
1119 else:
1149 m = f in manifests[0] and manifests[0] or manifests[1]
1120 m = f in manifests[0] and manifests[0] or manifests[1]
1150 t = self.file(f).read(m[f])
1121 t = self.file(f).read(m[f])
1151 self.wwrite(f, t, m.flags(f))
1122 self.wwrite(f, t, m.flags(f))
1152 self.dirstate.normal(f)
1123 self.dirstate.normal(f)
1153 finally:
1124 finally:
1154 wlock.release()
1125 wlock.release()
1155
1126
1156 def copy(self, source, dest):
1127 def copy(self, source, dest):
1157 p = self.wjoin(dest)
1128 p = self.wjoin(dest)
1158 if not (os.path.exists(p) or os.path.islink(p)):
1129 if not (os.path.exists(p) or os.path.islink(p)):
1159 self.ui.warn(_("%s does not exist!\n") % dest)
1130 self.ui.warn(_("%s does not exist!\n") % dest)
1160 elif not (os.path.isfile(p) or os.path.islink(p)):
1131 elif not (os.path.isfile(p) or os.path.islink(p)):
1161 self.ui.warn(_("copy failed: %s is not a file or a "
1132 self.ui.warn(_("copy failed: %s is not a file or a "
1162 "symbolic link\n") % dest)
1133 "symbolic link\n") % dest)
1163 else:
1134 else:
1164 wlock = self.wlock()
1135 wlock = self.wlock()
1165 try:
1136 try:
1166 if self.dirstate[dest] in '?r':
1137 if self.dirstate[dest] in '?r':
1167 self.dirstate.add(dest)
1138 self.dirstate.add(dest)
1168 self.dirstate.copy(source, dest)
1139 self.dirstate.copy(source, dest)
1169 finally:
1140 finally:
1170 wlock.release()
1141 wlock.release()
1171
1142
1172 def heads(self, start=None, closed=True):
1143 def heads(self, start=None, closed=True):
1173 heads = self.changelog.heads(start)
1144 heads = self.changelog.heads(start)
1174 def display(head):
1145 def display(head):
1175 if closed:
1146 if closed:
1176 return True
1147 return True
1177 extras = self.changelog.read(head)[5]
1148 extras = self.changelog.read(head)[5]
1178 return ('close' not in extras)
1149 return ('close' not in extras)
1179 # sort the output in rev descending order
1150 # sort the output in rev descending order
1180 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1151 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1181 return [n for (r, n) in sorted(heads)]
1152 return [n for (r, n) in sorted(heads)]
1182
1153
1183 def branchheads(self, branch=None, start=None, closed=True):
1154 def branchheads(self, branch=None, start=None, closed=True):
1184 if branch is None:
1155 if branch is None:
1185 branch = self[None].branch()
1156 branch = self[None].branch()
1186 branches = self._branchheads()
1157 branches = self._branchheads()
1187 if branch not in branches:
1158 if branch not in branches:
1188 return []
1159 return []
1189 bheads = branches[branch]
1160 bheads = branches[branch]
1190 # the cache returns heads ordered lowest to highest
1161 # the cache returns heads ordered lowest to highest
1191 bheads.reverse()
1162 bheads.reverse()
1192 if start is not None:
1163 if start is not None:
1193 # filter out the heads that cannot be reached from startrev
1164 # filter out the heads that cannot be reached from startrev
1194 bheads = self.changelog.nodesbetween([start], bheads)[2]
1165 bheads = self.changelog.nodesbetween([start], bheads)[2]
1195 if not closed:
1166 if not closed:
1196 bheads = [h for h in bheads if
1167 bheads = [h for h in bheads if
1197 ('close' not in self.changelog.read(h)[5])]
1168 ('close' not in self.changelog.read(h)[5])]
1198 return bheads
1169 return bheads
1199
1170
1200 def branches(self, nodes):
1171 def branches(self, nodes):
1201 if not nodes:
1172 if not nodes:
1202 nodes = [self.changelog.tip()]
1173 nodes = [self.changelog.tip()]
1203 b = []
1174 b = []
1204 for n in nodes:
1175 for n in nodes:
1205 t = n
1176 t = n
1206 while 1:
1177 while 1:
1207 p = self.changelog.parents(n)
1178 p = self.changelog.parents(n)
1208 if p[1] != nullid or p[0] == nullid:
1179 if p[1] != nullid or p[0] == nullid:
1209 b.append((t, n, p[0], p[1]))
1180 b.append((t, n, p[0], p[1]))
1210 break
1181 break
1211 n = p[0]
1182 n = p[0]
1212 return b
1183 return b
1213
1184
1214 def between(self, pairs):
1185 def between(self, pairs):
1215 r = []
1186 r = []
1216
1187
1217 for top, bottom in pairs:
1188 for top, bottom in pairs:
1218 n, l, i = top, [], 0
1189 n, l, i = top, [], 0
1219 f = 1
1190 f = 1
1220
1191
1221 while n != bottom and n != nullid:
1192 while n != bottom and n != nullid:
1222 p = self.changelog.parents(n)[0]
1193 p = self.changelog.parents(n)[0]
1223 if i == f:
1194 if i == f:
1224 l.append(n)
1195 l.append(n)
1225 f = f * 2
1196 f = f * 2
1226 n = p
1197 n = p
1227 i += 1
1198 i += 1
1228
1199
1229 r.append(l)
1200 r.append(l)
1230
1201
1231 return r
1202 return r
1232
1203
1233 def findincoming(self, remote, base=None, heads=None, force=False):
1204 def findincoming(self, remote, base=None, heads=None, force=False):
1234 """Return list of roots of the subsets of missing nodes from remote
1205 """Return list of roots of the subsets of missing nodes from remote
1235
1206
1236 If base dict is specified, assume that these nodes and their parents
1207 If base dict is specified, assume that these nodes and their parents
1237 exist on the remote side and that no child of a node of base exists
1208 exist on the remote side and that no child of a node of base exists
1238 in both remote and self.
1209 in both remote and self.
1239 Furthermore, base will be updated to include the nodes that exist
1210 Furthermore, base will be updated to include the nodes that exist
1240 in both self and remote but none of whose children do.
1211 in both self and remote but none of whose children do.
1241 If a list of heads is specified, return only nodes which are heads
1212 If a list of heads is specified, return only nodes which are heads
1242 or ancestors of these heads.
1213 or ancestors of these heads.
1243
1214
1244 All the ancestors of base are in self and in remote.
1215 All the ancestors of base are in self and in remote.
1245 All the descendants of the list returned are missing in self.
1216 All the descendants of the list returned are missing in self.
1246 (and so we know that the rest of the nodes are missing in remote, see
1217 (and so we know that the rest of the nodes are missing in remote, see
1247 outgoing)
1218 outgoing)
1248 """
1219 """
1249 return self.findcommonincoming(remote, base, heads, force)[1]
1220 return self.findcommonincoming(remote, base, heads, force)[1]
1250
1221
1251 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1222 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1252 """Return a tuple (common, missing roots, heads) used to identify
1223 """Return a tuple (common, missing roots, heads) used to identify
1253 missing nodes from remote.
1224 missing nodes from remote.
1254
1225
1255 If base dict is specified, assume that these nodes and their parents
1226 If base dict is specified, assume that these nodes and their parents
1256 exist on the remote side and that no child of a node of base exists
1227 exist on the remote side and that no child of a node of base exists
1257 in both remote and self.
1228 in both remote and self.
1258 Furthermore, base will be updated to include the nodes that exist
1229 Furthermore, base will be updated to include the nodes that exist
1259 in both self and remote but none of whose children do.
1230 in both self and remote but none of whose children do.
1260 If a list of heads is specified, return only nodes which are heads
1231 If a list of heads is specified, return only nodes which are heads
1261 or ancestors of these heads.
1232 or ancestors of these heads.
1262
1233
1263 All the ancestors of base are in self and in remote.
1234 All the ancestors of base are in self and in remote.
1264 """
1235 """
1265 m = self.changelog.nodemap
1236 m = self.changelog.nodemap
1266 search = []
1237 search = []
1267 fetch = set()
1238 fetch = set()
1268 seen = set()
1239 seen = set()
1269 seenbranch = set()
1240 seenbranch = set()
1270 if base == None:
1241 if base == None:
1271 base = {}
1242 base = {}
1272
1243
1273 if not heads:
1244 if not heads:
1274 heads = remote.heads()
1245 heads = remote.heads()
1275
1246
1276 if self.changelog.tip() == nullid:
1247 if self.changelog.tip() == nullid:
1277 base[nullid] = 1
1248 base[nullid] = 1
1278 if heads != [nullid]:
1249 if heads != [nullid]:
1279 return [nullid], [nullid], list(heads)
1250 return [nullid], [nullid], list(heads)
1280 return [nullid], [], []
1251 return [nullid], [], []
1281
1252
1282 # assume we're closer to the tip than the root
1253 # assume we're closer to the tip than the root
1283 # and start by examining the heads
1254 # and start by examining the heads
1284 self.ui.status(_("searching for changes\n"))
1255 self.ui.status(_("searching for changes\n"))
1285
1256
1286 unknown = []
1257 unknown = []
1287 for h in heads:
1258 for h in heads:
1288 if h not in m:
1259 if h not in m:
1289 unknown.append(h)
1260 unknown.append(h)
1290 else:
1261 else:
1291 base[h] = 1
1262 base[h] = 1
1292
1263
1293 heads = unknown
1264 heads = unknown
1294 if not unknown:
1265 if not unknown:
1295 return base.keys(), [], []
1266 return base.keys(), [], []
1296
1267
1297 req = set(unknown)
1268 req = set(unknown)
1298 reqcnt = 0
1269 reqcnt = 0
1299
1270
1300 # search through remote branches
1271 # search through remote branches
1301 # a 'branch' here is a linear segment of history, with four parts:
1272 # a 'branch' here is a linear segment of history, with four parts:
1302 # head, root, first parent, second parent
1273 # head, root, first parent, second parent
1303 # (a branch always has two parents (or none) by definition)
1274 # (a branch always has two parents (or none) by definition)
1304 unknown = remote.branches(unknown)
1275 unknown = remote.branches(unknown)
1305 while unknown:
1276 while unknown:
1306 r = []
1277 r = []
1307 while unknown:
1278 while unknown:
1308 n = unknown.pop(0)
1279 n = unknown.pop(0)
1309 if n[0] in seen:
1280 if n[0] in seen:
1310 continue
1281 continue
1311
1282
1312 self.ui.debug(_("examining %s:%s\n")
1283 self.ui.debug(_("examining %s:%s\n")
1313 % (short(n[0]), short(n[1])))
1284 % (short(n[0]), short(n[1])))
1314 if n[0] == nullid: # found the end of the branch
1285 if n[0] == nullid: # found the end of the branch
1315 pass
1286 pass
1316 elif n in seenbranch:
1287 elif n in seenbranch:
1317 self.ui.debug(_("branch already found\n"))
1288 self.ui.debug(_("branch already found\n"))
1318 continue
1289 continue
1319 elif n[1] and n[1] in m: # do we know the base?
1290 elif n[1] and n[1] in m: # do we know the base?
1320 self.ui.debug(_("found incomplete branch %s:%s\n")
1291 self.ui.debug(_("found incomplete branch %s:%s\n")
1321 % (short(n[0]), short(n[1])))
1292 % (short(n[0]), short(n[1])))
1322 search.append(n[0:2]) # schedule branch range for scanning
1293 search.append(n[0:2]) # schedule branch range for scanning
1323 seenbranch.add(n)
1294 seenbranch.add(n)
1324 else:
1295 else:
1325 if n[1] not in seen and n[1] not in fetch:
1296 if n[1] not in seen and n[1] not in fetch:
1326 if n[2] in m and n[3] in m:
1297 if n[2] in m and n[3] in m:
1327 self.ui.debug(_("found new changeset %s\n") %
1298 self.ui.debug(_("found new changeset %s\n") %
1328 short(n[1]))
1299 short(n[1]))
1329 fetch.add(n[1]) # earliest unknown
1300 fetch.add(n[1]) # earliest unknown
1330 for p in n[2:4]:
1301 for p in n[2:4]:
1331 if p in m:
1302 if p in m:
1332 base[p] = 1 # latest known
1303 base[p] = 1 # latest known
1333
1304
1334 for p in n[2:4]:
1305 for p in n[2:4]:
1335 if p not in req and p not in m:
1306 if p not in req and p not in m:
1336 r.append(p)
1307 r.append(p)
1337 req.add(p)
1308 req.add(p)
1338 seen.add(n[0])
1309 seen.add(n[0])
1339
1310
1340 if r:
1311 if r:
1341 reqcnt += 1
1312 reqcnt += 1
1342 self.ui.debug(_("request %d: %s\n") %
1313 self.ui.debug(_("request %d: %s\n") %
1343 (reqcnt, " ".join(map(short, r))))
1314 (reqcnt, " ".join(map(short, r))))
1344 for p in xrange(0, len(r), 10):
1315 for p in xrange(0, len(r), 10):
1345 for b in remote.branches(r[p:p+10]):
1316 for b in remote.branches(r[p:p+10]):
1346 self.ui.debug(_("received %s:%s\n") %
1317 self.ui.debug(_("received %s:%s\n") %
1347 (short(b[0]), short(b[1])))
1318 (short(b[0]), short(b[1])))
1348 unknown.append(b)
1319 unknown.append(b)
1349
1320
1350 # do binary search on the branches we found
1321 # do binary search on the branches we found
1351 while search:
1322 while search:
1352 newsearch = []
1323 newsearch = []
1353 reqcnt += 1
1324 reqcnt += 1
1354 for n, l in zip(search, remote.between(search)):
1325 for n, l in zip(search, remote.between(search)):
1355 l.append(n[1])
1326 l.append(n[1])
1356 p = n[0]
1327 p = n[0]
1357 f = 1
1328 f = 1
1358 for i in l:
1329 for i in l:
1359 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1330 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1360 if i in m:
1331 if i in m:
1361 if f <= 2:
1332 if f <= 2:
1362 self.ui.debug(_("found new branch changeset %s\n") %
1333 self.ui.debug(_("found new branch changeset %s\n") %
1363 short(p))
1334 short(p))
1364 fetch.add(p)
1335 fetch.add(p)
1365 base[i] = 1
1336 base[i] = 1
1366 else:
1337 else:
1367 self.ui.debug(_("narrowed branch search to %s:%s\n")
1338 self.ui.debug(_("narrowed branch search to %s:%s\n")
1368 % (short(p), short(i)))
1339 % (short(p), short(i)))
1369 newsearch.append((p, i))
1340 newsearch.append((p, i))
1370 break
1341 break
1371 p, f = i, f * 2
1342 p, f = i, f * 2
1372 search = newsearch
1343 search = newsearch
1373
1344
1374 # sanity check our fetch list
1345 # sanity check our fetch list
1375 for f in fetch:
1346 for f in fetch:
1376 if f in m:
1347 if f in m:
1377 raise error.RepoError(_("already have changeset ")
1348 raise error.RepoError(_("already have changeset ")
1378 + short(f[:4]))
1349 + short(f[:4]))
1379
1350
1380 if base.keys() == [nullid]:
1351 if base.keys() == [nullid]:
1381 if force:
1352 if force:
1382 self.ui.warn(_("warning: repository is unrelated\n"))
1353 self.ui.warn(_("warning: repository is unrelated\n"))
1383 else:
1354 else:
1384 raise util.Abort(_("repository is unrelated"))
1355 raise util.Abort(_("repository is unrelated"))
1385
1356
1386 self.ui.debug(_("found new changesets starting at ") +
1357 self.ui.debug(_("found new changesets starting at ") +
1387 " ".join([short(f) for f in fetch]) + "\n")
1358 " ".join([short(f) for f in fetch]) + "\n")
1388
1359
1389 self.ui.debug(_("%d total queries\n") % reqcnt)
1360 self.ui.debug(_("%d total queries\n") % reqcnt)
1390
1361
1391 return base.keys(), list(fetch), heads
1362 return base.keys(), list(fetch), heads
1392
1363
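An illustrative use of the discovery entry point above, assuming other is another repository object (for example one opened with mercurial.hg.repository):

common, fetch, heads = repo.findcommonincoming(other)
if not fetch:
    repo.ui.status('nothing incoming\n')
else:
    repo.ui.status('%d root(s) of missing changesets\n' % len(fetch))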
1393 def findoutgoing(self, remote, base=None, heads=None, force=False):
1364 def findoutgoing(self, remote, base=None, heads=None, force=False):
1394 """Return list of nodes that are roots of subsets not in remote
1365 """Return list of nodes that are roots of subsets not in remote
1395
1366
1396 If base dict is specified, assume that these nodes and their parents
1367 If base dict is specified, assume that these nodes and their parents
1397 exist on the remote side.
1368 exist on the remote side.
1398 If a list of heads is specified, return only nodes which are heads
1369 If a list of heads is specified, return only nodes which are heads
1399 or ancestors of these heads, and return a second element which
1370 or ancestors of these heads, and return a second element which
1400 contains all remote heads which get new children.
1371 contains all remote heads which get new children.
1401 """
1372 """
1402 if base == None:
1373 if base == None:
1403 base = {}
1374 base = {}
1404 self.findincoming(remote, base, heads, force=force)
1375 self.findincoming(remote, base, heads, force=force)
1405
1376
1406 self.ui.debug(_("common changesets up to ")
1377 self.ui.debug(_("common changesets up to ")
1407 + " ".join(map(short, base.keys())) + "\n")
1378 + " ".join(map(short, base.keys())) + "\n")
1408
1379
1409 remain = set(self.changelog.nodemap)
1380 remain = set(self.changelog.nodemap)
1410
1381
1411 # prune everything remote has from the tree
1382 # prune everything remote has from the tree
1412 remain.remove(nullid)
1383 remain.remove(nullid)
1413 remove = base.keys()
1384 remove = base.keys()
1414 while remove:
1385 while remove:
1415 n = remove.pop(0)
1386 n = remove.pop(0)
1416 if n in remain:
1387 if n in remain:
1417 remain.remove(n)
1388 remain.remove(n)
1418 for p in self.changelog.parents(n):
1389 for p in self.changelog.parents(n):
1419 remove.append(p)
1390 remove.append(p)
1420
1391
1421 # find every node whose parents have been pruned
1392 # find every node whose parents have been pruned
1422 subset = []
1393 subset = []
1423 # find every remote head that will get new children
1394 # find every remote head that will get new children
1424 updated_heads = {}
1395 updated_heads = {}
1425 for n in remain:
1396 for n in remain:
1426 p1, p2 = self.changelog.parents(n)
1397 p1, p2 = self.changelog.parents(n)
1427 if p1 not in remain and p2 not in remain:
1398 if p1 not in remain and p2 not in remain:
1428 subset.append(n)
1399 subset.append(n)
1429 if heads:
1400 if heads:
1430 if p1 in heads:
1401 if p1 in heads:
1431 updated_heads[p1] = True
1402 updated_heads[p1] = True
1432 if p2 in heads:
1403 if p2 in heads:
1433 updated_heads[p2] = True
1404 updated_heads[p2] = True
1434
1405
1435 # this is the set of all roots we have to push
1406 # this is the set of all roots we have to push
1436 if heads:
1407 if heads:
1437 return subset, updated_heads.keys()
1408 return subset, updated_heads.keys()
1438 else:
1409 else:
1439 return subset
1410 return subset
1440
1411
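Conversely, a sketch of the outgoing side (other and repo assumed as above); with a heads list the method also reports which remote heads would gain new children:

roots = repo.findoutgoing(other)                              # just the roots
subset, updated_heads = repo.findoutgoing(other, heads=other.heads())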
1441 def pull(self, remote, heads=None, force=False):
1412 def pull(self, remote, heads=None, force=False):
1442 lock = self.lock()
1413 lock = self.lock()
1443 try:
1414 try:
1444 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1415 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1445 force=force)
1416 force=force)
1446 if fetch == [nullid]:
1417 if fetch == [nullid]:
1447 self.ui.status(_("requesting all changes\n"))
1418 self.ui.status(_("requesting all changes\n"))
1448
1419
1449 if not fetch:
1420 if not fetch:
1450 self.ui.status(_("no changes found\n"))
1421 self.ui.status(_("no changes found\n"))
1451 return 0
1422 return 0
1452
1423
1453 if heads is None and remote.capable('changegroupsubset'):
1424 if heads is None and remote.capable('changegroupsubset'):
1454 heads = rheads
1425 heads = rheads
1455
1426
1456 if heads is None:
1427 if heads is None:
1457 cg = remote.changegroup(fetch, 'pull')
1428 cg = remote.changegroup(fetch, 'pull')
1458 else:
1429 else:
1459 if not remote.capable('changegroupsubset'):
1430 if not remote.capable('changegroupsubset'):
1460 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1431 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1461 cg = remote.changegroupsubset(fetch, heads, 'pull')
1432 cg = remote.changegroupsubset(fetch, heads, 'pull')
1462 return self.addchangegroup(cg, 'pull', remote.url())
1433 return self.addchangegroup(cg, 'pull', remote.url())
1463 finally:
1434 finally:
1464 lock.release()
1435 lock.release()
1465
1436
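A hedged sketch of pulling from a remote (the URL is hypothetical; repo is assumed to exist):

from mercurial import hg

other = hg.repository(repo.ui, 'http://hg.example.com/repo')
repo.pull(other)                   # fetch every missing changeset
# repo.pull(other, heads=[node])   # partial pull; needs the remote's
#                                  # 'changegroupsubset' capability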
1466 def push(self, remote, force=False, revs=None):
1437 def push(self, remote, force=False, revs=None):
1467 # there are two ways to push to remote repo:
1438 # there are two ways to push to remote repo:
1468 #
1439 #
1469 # addchangegroup assumes local user can lock remote
1440 # addchangegroup assumes local user can lock remote
1470 # repo (local filesystem, old ssh servers).
1441 # repo (local filesystem, old ssh servers).
1471 #
1442 #
1472 # unbundle assumes local user cannot lock remote repo (new ssh
1443 # unbundle assumes local user cannot lock remote repo (new ssh
1473 # servers, http servers).
1444 # servers, http servers).
1474
1445
1475 if remote.capable('unbundle'):
1446 if remote.capable('unbundle'):
1476 return self.push_unbundle(remote, force, revs)
1447 return self.push_unbundle(remote, force, revs)
1477 return self.push_addchangegroup(remote, force, revs)
1448 return self.push_addchangegroup(remote, force, revs)
1478
1449
1479 def prepush(self, remote, force, revs):
1450 def prepush(self, remote, force, revs):
1480 common = {}
1451 common = {}
1481 remote_heads = remote.heads()
1452 remote_heads = remote.heads()
1482 inc = self.findincoming(remote, common, remote_heads, force=force)
1453 inc = self.findincoming(remote, common, remote_heads, force=force)
1483
1454
1484 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1455 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1485 if revs is not None:
1456 if revs is not None:
1486 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1457 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1487 else:
1458 else:
1488 bases, heads = update, self.changelog.heads()
1459 bases, heads = update, self.changelog.heads()
1489
1460
1490 if not bases:
1461 if not bases:
1491 self.ui.status(_("no changes found\n"))
1462 self.ui.status(_("no changes found\n"))
1492 return None, 1
1463 return None, 1
1493 elif not force:
1464 elif not force:
1494 # check if we're creating new remote heads
1465 # check if we're creating new remote heads
1495 # to be a remote head after push, node must be either
1466 # to be a remote head after push, node must be either
1496 # - unknown locally
1467 # - unknown locally
1497 # - a local outgoing head descended from update
1468 # - a local outgoing head descended from update
1498 # - a remote head that's known locally and not
1469 # - a remote head that's known locally and not
1499 # ancestral to an outgoing head
1470 # ancestral to an outgoing head
1500
1471
1501 warn = 0
1472 warn = 0
1502
1473
1503 if remote_heads == [nullid]:
1474 if remote_heads == [nullid]:
1504 warn = 0
1475 warn = 0
1505 elif not revs and len(heads) > len(remote_heads):
1476 elif not revs and len(heads) > len(remote_heads):
1506 warn = 1
1477 warn = 1
1507 else:
1478 else:
1508 newheads = list(heads)
1479 newheads = list(heads)
1509 for r in remote_heads:
1480 for r in remote_heads:
1510 if r in self.changelog.nodemap:
1481 if r in self.changelog.nodemap:
1511 desc = self.changelog.heads(r, heads)
1482 desc = self.changelog.heads(r, heads)
1512 l = [h for h in heads if h in desc]
1483 l = [h for h in heads if h in desc]
1513 if not l:
1484 if not l:
1514 newheads.append(r)
1485 newheads.append(r)
1515 else:
1486 else:
1516 newheads.append(r)
1487 newheads.append(r)
1517 if len(newheads) > len(remote_heads):
1488 if len(newheads) > len(remote_heads):
1518 warn = 1
1489 warn = 1
1519
1490
1520 if warn:
1491 if warn:
1521 self.ui.warn(_("abort: push creates new remote heads!\n"))
1492 self.ui.warn(_("abort: push creates new remote heads!\n"))
1522 self.ui.status(_("(did you forget to merge?"
1493 self.ui.status(_("(did you forget to merge?"
1523 " use push -f to force)\n"))
1494 " use push -f to force)\n"))
1524 return None, 0
1495 return None, 0
1525 elif inc:
1496 elif inc:
1526 self.ui.warn(_("note: unsynced remote changes!\n"))
1497 self.ui.warn(_("note: unsynced remote changes!\n"))
1527
1498
1528
1499
1529 if revs is None:
1500 if revs is None:
1530 # use the fast path, no race possible on push
1501 # use the fast path, no race possible on push
1531 cg = self._changegroup(common.keys(), 'push')
1502 cg = self._changegroup(common.keys(), 'push')
1532 else:
1503 else:
1533 cg = self.changegroupsubset(update, revs, 'push')
1504 cg = self.changegroupsubset(update, revs, 'push')
1534 return cg, remote_heads
1505 return cg, remote_heads
1535
1506
1536 def push_addchangegroup(self, remote, force, revs):
1507 def push_addchangegroup(self, remote, force, revs):
1537 lock = remote.lock()
1508 lock = remote.lock()
1538 try:
1509 try:
1539 ret = self.prepush(remote, force, revs)
1510 ret = self.prepush(remote, force, revs)
1540 if ret[0] is not None:
1511 if ret[0] is not None:
1541 cg, remote_heads = ret
1512 cg, remote_heads = ret
1542 return remote.addchangegroup(cg, 'push', self.url())
1513 return remote.addchangegroup(cg, 'push', self.url())
1543 return ret[1]
1514 return ret[1]
1544 finally:
1515 finally:
1545 lock.release()
1516 lock.release()
1546
1517
1547 def push_unbundle(self, remote, force, revs):
1518 def push_unbundle(self, remote, force, revs):
1548 # local repo finds heads on server, finds out what revs it
1519 # local repo finds heads on server, finds out what revs it
1549 # must push. once revs transferred, if server finds it has
1520 # must push. once revs transferred, if server finds it has
1550 # different heads (someone else won commit/push race), server
1521 # different heads (someone else won commit/push race), server
1551 # aborts.
1522 # aborts.
1552
1523
1553 ret = self.prepush(remote, force, revs)
1524 ret = self.prepush(remote, force, revs)
1554 if ret[0] is not None:
1525 if ret[0] is not None:
1555 cg, remote_heads = ret
1526 cg, remote_heads = ret
1556 if force: remote_heads = ['force']
1527 if force: remote_heads = ['force']
1557 return remote.unbundle(cg, remote_heads, 'push')
1528 return remote.unbundle(cg, remote_heads, 'push')
1558 return ret[1]
1529 return ret[1]
1559
1530
1560 def changegroupinfo(self, nodes, source):
1531 def changegroupinfo(self, nodes, source):
1561 if self.ui.verbose or source == 'bundle':
1532 if self.ui.verbose or source == 'bundle':
1562 self.ui.status(_("%d changesets found\n") % len(nodes))
1533 self.ui.status(_("%d changesets found\n") % len(nodes))
1563 if self.ui.debugflag:
1534 if self.ui.debugflag:
1564 self.ui.debug(_("list of changesets:\n"))
1535 self.ui.debug(_("list of changesets:\n"))
1565 for node in nodes:
1536 for node in nodes:
1566 self.ui.debug("%s\n" % hex(node))
1537 self.ui.debug("%s\n" % hex(node))
1567
1538
1568 def changegroupsubset(self, bases, heads, source, extranodes=None):
1539 def changegroupsubset(self, bases, heads, source, extranodes=None):
1569 """This function generates a changegroup consisting of all the nodes
1540 """This function generates a changegroup consisting of all the nodes
1570 that are descendants of any of the bases, and ancestors of any of
1541 that are descendants of any of the bases, and ancestors of any of
1571 the heads.
1542 the heads.
1572
1543
1573 It is fairly complex as determining which filenodes and which
1544 It is fairly complex as determining which filenodes and which
1574 manifest nodes need to be included for the changeset to be complete
1545 manifest nodes need to be included for the changeset to be complete
1575 is non-trivial.
1546 is non-trivial.
1576
1547
1577 Another wrinkle is doing the reverse, figuring out which changeset in
1548 Another wrinkle is doing the reverse, figuring out which changeset in
1578 the changegroup a particular filenode or manifestnode belongs to.
1549 the changegroup a particular filenode or manifestnode belongs to.
1579
1550
1580 The caller can specify some nodes that must be included in the
1551 The caller can specify some nodes that must be included in the
1581 changegroup using the extranodes argument. It should be a dict
1552 changegroup using the extranodes argument. It should be a dict
1582 where the keys are the filenames (or 1 for the manifest), and the
1553 where the keys are the filenames (or 1 for the manifest), and the
1583 values are lists of (node, linknode) tuples, where node is a wanted
1554 values are lists of (node, linknode) tuples, where node is a wanted
1584 node and linknode is the changelog node that should be transmitted as
1555 node and linknode is the changelog node that should be transmitted as
1585 the linkrev.
1556 the linkrev.
1586 """
1557 """
1587
1558
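# Illustration (not part of this changeset): a hypothetical extranodes
# argument forcing one extra filenode and one extra manifest node into the
# changegroup would be shaped like
#
#   extranodes = {
#       'some/file.txt': [(filenode, linknode)],
#       1:               [(manifestnode, linknode)],   # the key 1 means "manifest"
#   }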
1588 if extranodes is None:
1559 if extranodes is None:
1589 # can we go through the fast path ?
1560 # can we go through the fast path ?
1590 heads.sort()
1561 heads.sort()
1591 allheads = self.heads()
1562 allheads = self.heads()
1592 allheads.sort()
1563 allheads.sort()
1593 if heads == allheads:
1564 if heads == allheads:
1594 common = []
1565 common = []
1595 # parents of bases are known from both sides
1566 # parents of bases are known from both sides
1596 for n in bases:
1567 for n in bases:
1597 for p in self.changelog.parents(n):
1568 for p in self.changelog.parents(n):
1598 if p != nullid:
1569 if p != nullid:
1599 common.append(p)
1570 common.append(p)
1600 return self._changegroup(common, source)
1571 return self._changegroup(common, source)
1601
1572
1602 self.hook('preoutgoing', throw=True, source=source)
1573 self.hook('preoutgoing', throw=True, source=source)
1603
1574
1604 # Set up some initial variables
1575 # Set up some initial variables
1605 # Make it easy to refer to self.changelog
1576 # Make it easy to refer to self.changelog
1606 cl = self.changelog
1577 cl = self.changelog
1607 # msng is short for missing - compute the list of changesets in this
1578 # msng is short for missing - compute the list of changesets in this
1608 # changegroup.
1579 # changegroup.
1609 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1580 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1610 self.changegroupinfo(msng_cl_lst, source)
1581 self.changegroupinfo(msng_cl_lst, source)
1611 # Some bases may turn out to be superfluous, and some heads may be
1582 # Some bases may turn out to be superfluous, and some heads may be
1612 # too. nodesbetween will return the minimal set of bases and heads
1583 # too. nodesbetween will return the minimal set of bases and heads
1613 # necessary to re-create the changegroup.
1584 # necessary to re-create the changegroup.
1614
1585
1615 # Known heads are the list of heads that it is assumed the recipient
1586 # Known heads are the list of heads that it is assumed the recipient
1616 # of this changegroup will know about.
1587 # of this changegroup will know about.
1617 knownheads = {}
1588 knownheads = {}
1618 # We assume that all parents of bases are known heads.
1589 # We assume that all parents of bases are known heads.
1619 for n in bases:
1590 for n in bases:
1620 for p in cl.parents(n):
1591 for p in cl.parents(n):
1621 if p != nullid:
1592 if p != nullid:
1622 knownheads[p] = 1
1593 knownheads[p] = 1
1623 knownheads = knownheads.keys()
1594 knownheads = knownheads.keys()
1624 if knownheads:
1595 if knownheads:
1625 # Now that we know what heads are known, we can compute which
1596 # Now that we know what heads are known, we can compute which
1626 # changesets are known. The recipient must know about all
1597 # changesets are known. The recipient must know about all
1627 # changesets required to reach the known heads from the null
1598 # changesets required to reach the known heads from the null
1628 # changeset.
1599 # changeset.
1629 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1600 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1630 junk = None
1601 junk = None
1631 # Transform the list into a set.
1602 # Transform the list into a set.
1632 has_cl_set = set(has_cl_set)
1603 has_cl_set = set(has_cl_set)
1633 else:
1604 else:
1634 # If there were no known heads, the recipient cannot be assumed to
1605 # If there were no known heads, the recipient cannot be assumed to
1635 # know about any changesets.
1606 # know about any changesets.
1636 has_cl_set = set()
1607 has_cl_set = set()
1637
1608
1638 # Make it easy to refer to self.manifest
1609 # Make it easy to refer to self.manifest
1639 mnfst = self.manifest
1610 mnfst = self.manifest
1640 # We don't know which manifests are missing yet
1611 # We don't know which manifests are missing yet
1641 msng_mnfst_set = {}
1612 msng_mnfst_set = {}
1642 # Nor do we know which filenodes are missing.
1613 # Nor do we know which filenodes are missing.
1643 msng_filenode_set = {}
1614 msng_filenode_set = {}
1644
1615
1645 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1616 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1646 junk = None
1617 junk = None
1647
1618
1648 # A changeset always belongs to itself, so the changenode lookup
1619 # A changeset always belongs to itself, so the changenode lookup
1649 # function for a changenode is identity.
1620 # function for a changenode is identity.
1650 def identity(x):
1621 def identity(x):
1651 return x
1622 return x
1652
1623
1653 # A function generating function. Sets up an environment for the
1624 # A function generating function. Sets up an environment for the
1654 # inner function.
1625 # inner function.
1655 def cmp_by_rev_func(revlog):
1626 def cmp_by_rev_func(revlog):
1656 # Compare two nodes by their revision number in the environment's
1627 # Compare two nodes by their revision number in the environment's
1657 # revision history. Since the revision number both represents the
1628 # revision history. Since the revision number both represents the
1658 # most efficient order to read the nodes in, and represents a
1629 # most efficient order to read the nodes in, and represents a
1659 # topological sorting of the nodes, this function is often useful.
1630 # topological sorting of the nodes, this function is often useful.
1660 def cmp_by_rev(a, b):
1631 def cmp_by_rev(a, b):
1661 return cmp(revlog.rev(a), revlog.rev(b))
1632 return cmp(revlog.rev(a), revlog.rev(b))
1662 return cmp_by_rev
1633 return cmp_by_rev
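As a toy illustration of the closure above, a stand-in revlog (hypothetical, not Mercurial's) is enough to show that sorting with the returned comparator orders nodes by revision number; on Python 3 one would pass key=revlog.rev instead of a Python 2 cmp function:

    class FakeRevlog(object):
        # Stand-in revlog: rev() maps a node to its revision number.
        def __init__(self, nodes):
            self._revs = dict((n, i) for i, n in enumerate(nodes))
        def rev(self, node):
            return self._revs[node]

    def cmp_by_rev_func(revlog):
        def cmp_by_rev(a, b):
            return cmp(revlog.rev(a), revlog.rev(b))
        return cmp_by_rev

    log = FakeRevlog(["n0", "n1", "n2", "n3"])
    nodes = ["n3", "n1", "n2"]
    nodes.sort(cmp_by_rev_func(log))   # Python 2 comparator sort, as in the source
    print nodes                        # ['n1', 'n2', 'n3']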
1663
1634
1664 # If we determine that a particular file or manifest node must be a
1635 # If we determine that a particular file or manifest node must be a
1665 # node that the recipient of the changegroup will already have, we can
1636 # node that the recipient of the changegroup will already have, we can
1666 # also assume the recipient will have all of its ancestors. This function
1637 # also assume the recipient will have all of its ancestors. This function
1667 # prunes them from the set of missing nodes.
1638 # prunes them from the set of missing nodes.
1668 def prune_parents(revlog, hasset, msngset):
1639 def prune_parents(revlog, hasset, msngset):
1669 haslst = hasset.keys()
1640 haslst = hasset.keys()
1670 haslst.sort(cmp_by_rev_func(revlog))
1641 haslst.sort(cmp_by_rev_func(revlog))
1671 for node in haslst:
1642 for node in haslst:
1672 parentlst = [p for p in revlog.parents(node) if p != nullid]
1643 parentlst = [p for p in revlog.parents(node) if p != nullid]
1673 while parentlst:
1644 while parentlst:
1674 n = parentlst.pop()
1645 n = parentlst.pop()
1675 if n not in hasset:
1646 if n not in hasset:
1676 hasset[n] = 1
1647 hasset[n] = 1
1677 p = [p for p in revlog.parents(n) if p != nullid]
1648 p = [p for p in revlog.parents(n) if p != nullid]
1678 parentlst.extend(p)
1649 parentlst.extend(p)
1679 for n in hasset:
1650 for n in hasset:
1680 msngset.pop(n, None)
1651 msngset.pop(n, None)
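The pruning idea above can be shown self-contained with a stub revlog (hypothetical; the rev-ordered sort from prune_parents is omitted here): once the recipient is known to have a node, all of that node's ancestors drop out of the missing set.

    nullid = "\0" * 20   # "no parent" sentinel, as in mercurial.node

    class FakeRevlog(object):
        # Linear toy history: n0 <- n1 <- n2 <- n3
        parentmap = {"n0": [nullid, nullid], "n1": ["n0", nullid],
                     "n2": ["n1", nullid], "n3": ["n2", nullid]}
        def parents(self, node):
            return self.parentmap[node]

    def prune_ancestors(revlog, hasset, msngset):
        # Same transitive parent walk as prune_parents above.
        for node in list(hasset):
            stack = [p for p in revlog.parents(node) if p != nullid]
            while stack:
                n = stack.pop()
                if n not in hasset:
                    hasset[n] = 1
                    stack.extend(p for p in revlog.parents(n) if p != nullid)
        for n in hasset:
            msngset.pop(n, None)

    hasset = {"n2": 1}                    # the recipient is known to have n2
    msngset = dict.fromkeys(["n0", "n1", "n2", "n3"], 1)
    prune_ancestors(FakeRevlog(), hasset, msngset)
    print sorted(msngset)                 # ['n3'] -- only n3 is still missing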
1681
1652
1682 # This is a function generating function used to set up an environment
1653 # This is a function generating function used to set up an environment
1683 # for the inner function to execute in.
1654 # for the inner function to execute in.
1684 def manifest_and_file_collector(changedfileset):
1655 def manifest_and_file_collector(changedfileset):
1685 # This is an information gathering function that gathers
1656 # This is an information gathering function that gathers
1686 # information from each changeset node that goes out as part of
1657 # information from each changeset node that goes out as part of
1687 # the changegroup. The information gathered is a list of which
1658 # the changegroup. The information gathered is a list of which
1688 # manifest nodes are potentially required (the recipient may
1659 # manifest nodes are potentially required (the recipient may
1689 # already have them) and the full list of all files that were
1660 # already have them) and the full list of all files that were
1690 # changed in any changeset in the changegroup.
1661 # changed in any changeset in the changegroup.
1691 #
1662 #
1692 # For each manifest we also remember the first changenode we saw
1663 # For each manifest we also remember the first changenode we saw
1693 # that referenced it, so we can later determine which changenode
1664 # that referenced it, so we can later determine which changenode
1694 # 'owns' the manifest.
1665 # 'owns' the manifest.
1695 def collect_manifests_and_files(clnode):
1666 def collect_manifests_and_files(clnode):
1696 c = cl.read(clnode)
1667 c = cl.read(clnode)
1697 for f in c[3]:
1668 for f in c[3]:
1698 # This is to make sure we only have one instance of each
1669 # This is to make sure we only have one instance of each
1699 # filename string for each filename.
1670 # filename string for each filename.
1700 changedfileset.setdefault(f, f)
1671 changedfileset.setdefault(f, f)
1701 msng_mnfst_set.setdefault(c[0], clnode)
1672 msng_mnfst_set.setdefault(c[0], clnode)
1702 return collect_manifests_and_files
1673 return collect_manifests_and_files
1703
1674
1704 # Figure out which manifest nodes (of the ones we think might be part
1675 # Figure out which manifest nodes (of the ones we think might be part
1705 # of the changegroup) the recipient must know about and remove them
1676 # of the changegroup) the recipient must know about and remove them
1706 # from the changegroup.
1677 # from the changegroup.
1707 def prune_manifests():
1678 def prune_manifests():
1708 has_mnfst_set = {}
1679 has_mnfst_set = {}
1709 for n in msng_mnfst_set:
1680 for n in msng_mnfst_set:
1710 # If a 'missing' manifest thinks it belongs to a changenode
1681 # If a 'missing' manifest thinks it belongs to a changenode
1711 # the recipient is assumed to have, obviously the recipient
1682 # the recipient is assumed to have, obviously the recipient
1712 # must have that manifest.
1683 # must have that manifest.
1713 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1684 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1714 if linknode in has_cl_set:
1685 if linknode in has_cl_set:
1715 has_mnfst_set[n] = 1
1686 has_mnfst_set[n] = 1
1716 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1687 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1717
1688
1718 # Use the information collected in collect_manifests_and_files to say
1689 # Use the information collected in collect_manifests_and_files to say
1719 # which changenode any manifestnode belongs to.
1690 # which changenode any manifestnode belongs to.
1720 def lookup_manifest_link(mnfstnode):
1691 def lookup_manifest_link(mnfstnode):
1721 return msng_mnfst_set[mnfstnode]
1692 return msng_mnfst_set[mnfstnode]
1722
1693
1723 # A function generating function that sets up the initial environment
1694 # A function generating function that sets up the initial environment
1724 # for the inner function.
1695 # for the inner function.
1725 def filenode_collector(changedfiles):
1696 def filenode_collector(changedfiles):
1726 next_rev = [0]
1697 next_rev = [0]
1727 # This gathers information from each manifestnode included in the
1698 # This gathers information from each manifestnode included in the
1728 # changegroup about which filenodes the manifest node references
1699 # changegroup about which filenodes the manifest node references
1729 # so we can include those in the changegroup too.
1700 # so we can include those in the changegroup too.
1730 #
1701 #
1731 # It also remembers which changenode each filenode belongs to. It
1702 # It also remembers which changenode each filenode belongs to. It
1732 # does this by assuming that a filenode belongs to the changenode
1703 # does this by assuming that a filenode belongs to the changenode
1733 # that the first manifest referencing it belongs to.
1704 # that the first manifest referencing it belongs to.
1734 def collect_msng_filenodes(mnfstnode):
1705 def collect_msng_filenodes(mnfstnode):
1735 r = mnfst.rev(mnfstnode)
1706 r = mnfst.rev(mnfstnode)
1736 if r == next_rev[0]:
1707 if r == next_rev[0]:
1737 # If this rev immediately follows the last one we looked at,
1708 # If this rev immediately follows the last one we looked at,
1738 # reading the delta is enough.
1709 # reading the delta is enough.
1739 deltamf = mnfst.readdelta(mnfstnode)
1710 deltamf = mnfst.readdelta(mnfstnode)
1740 # For each file and filenode in the delta
1711 # For each file and filenode in the delta
1741 for f, fnode in deltamf.iteritems():
1712 for f, fnode in deltamf.iteritems():
1742 f = changedfiles.get(f, None)
1713 f = changedfiles.get(f, None)
1743 # And if the file is in the list of files we care
1714 # And if the file is in the list of files we care
1744 # about.
1715 # about.
1745 if f is not None:
1716 if f is not None:
1746 # Get the changenode this manifest belongs to
1717 # Get the changenode this manifest belongs to
1747 clnode = msng_mnfst_set[mnfstnode]
1718 clnode = msng_mnfst_set[mnfstnode]
1748 # Create the set of filenodes for the file if
1719 # Create the set of filenodes for the file if
1749 # there isn't one already.
1720 # there isn't one already.
1750 ndset = msng_filenode_set.setdefault(f, {})
1721 ndset = msng_filenode_set.setdefault(f, {})
1751 # And set the filenode's changelog node to the
1722 # And set the filenode's changelog node to the
1752 # manifest's if it hasn't been set already.
1723 # manifest's if it hasn't been set already.
1753 ndset.setdefault(fnode, clnode)
1724 ndset.setdefault(fnode, clnode)
1754 else:
1725 else:
1755 # Otherwise we need a full manifest.
1726 # Otherwise we need a full manifest.
1756 m = mnfst.read(mnfstnode)
1727 m = mnfst.read(mnfstnode)
1757 # For every file we care about.
1728 # For every file we care about.
1758 for f in changedfiles:
1729 for f in changedfiles:
1759 fnode = m.get(f, None)
1730 fnode = m.get(f, None)
1760 # If it's in the manifest
1731 # If it's in the manifest
1761 if fnode is not None:
1732 if fnode is not None:
1762 # See comments above.
1733 # See comments above.
1763 clnode = msng_mnfst_set[mnfstnode]
1734 clnode = msng_mnfst_set[mnfstnode]
1764 ndset = msng_filenode_set.setdefault(f, {})
1735 ndset = msng_filenode_set.setdefault(f, {})
1765 ndset.setdefault(fnode, clnode)
1736 ndset.setdefault(fnode, clnode)
1766 # Remember the revision we hope to see next.
1737 # Remember the revision we hope to see next.
1767 next_rev[0] = r + 1
1738 next_rev[0] = r + 1
1768 return collect_msng_filenodes
1739 return collect_msng_filenodes
1769
1740
1770 # We have a list of filenodes we think we need for a file; let's remove
1741 # We have a list of filenodes we think we need for a file; let's remove
1771 # all those we know the recipient must have.
1742 # all those we know the recipient must have.
1772 def prune_filenodes(f, filerevlog):
1743 def prune_filenodes(f, filerevlog):
1773 msngset = msng_filenode_set[f]
1744 msngset = msng_filenode_set[f]
1774 hasset = {}
1745 hasset = {}
1775 # If a 'missing' filenode thinks it belongs to a changenode we
1746 # If a 'missing' filenode thinks it belongs to a changenode we
1776 # assume the recipient must have, then the recipient must have
1747 # assume the recipient must have, then the recipient must have
1777 # that filenode.
1748 # that filenode.
1778 for n in msngset:
1749 for n in msngset:
1779 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1750 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1780 if clnode in has_cl_set:
1751 if clnode in has_cl_set:
1781 hasset[n] = 1
1752 hasset[n] = 1
1782 prune_parents(filerevlog, hasset, msngset)
1753 prune_parents(filerevlog, hasset, msngset)
1783
1754
1784 # A function generating function that sets up a context for the
1755 # A function generating function that sets up a context for the
1785 # inner function.
1756 # inner function.
1786 def lookup_filenode_link_func(fname):
1757 def lookup_filenode_link_func(fname):
1787 msngset = msng_filenode_set[fname]
1758 msngset = msng_filenode_set[fname]
1788 # Look up the changenode the filenode belongs to.
1759 # Look up the changenode the filenode belongs to.
1789 def lookup_filenode_link(fnode):
1760 def lookup_filenode_link(fnode):
1790 return msngset[fnode]
1761 return msngset[fnode]
1791 return lookup_filenode_link
1762 return lookup_filenode_link
1792
1763
1793 # Add the nodes that were explicitly requested.
1764 # Add the nodes that were explicitly requested.
1794 def add_extra_nodes(name, nodes):
1765 def add_extra_nodes(name, nodes):
1795 if not extranodes or name not in extranodes:
1766 if not extranodes or name not in extranodes:
1796 return
1767 return
1797
1768
1798 for node, linknode in extranodes[name]:
1769 for node, linknode in extranodes[name]:
1799 if node not in nodes:
1770 if node not in nodes:
1800 nodes[node] = linknode
1771 nodes[node] = linknode
1801
1772
1802 # Now that we have all these utility functions to help out and
1773 # Now that we have all these utility functions to help out and
1803 # logically divide up the task, generate the group.
1774 # logically divide up the task, generate the group.
1804 def gengroup():
1775 def gengroup():
1805 # The set of changed files starts empty.
1776 # The set of changed files starts empty.
1806 changedfiles = {}
1777 changedfiles = {}
1807 # Create a changenode group generator that will call our functions
1778 # Create a changenode group generator that will call our functions
1808 # back to lookup the owning changenode and collect information.
1779 # back to lookup the owning changenode and collect information.
1809 group = cl.group(msng_cl_lst, identity,
1780 group = cl.group(msng_cl_lst, identity,
1810 manifest_and_file_collector(changedfiles))
1781 manifest_and_file_collector(changedfiles))
1811 for chnk in group:
1782 for chnk in group:
1812 yield chnk
1783 yield chnk
1813
1784
1814 # The list of manifests has been collected by the generator
1785 # The list of manifests has been collected by the generator
1815 # calling our functions back.
1786 # calling our functions back.
1816 prune_manifests()
1787 prune_manifests()
1817 add_extra_nodes(1, msng_mnfst_set)
1788 add_extra_nodes(1, msng_mnfst_set)
1818 msng_mnfst_lst = msng_mnfst_set.keys()
1789 msng_mnfst_lst = msng_mnfst_set.keys()
1819 # Sort the manifestnodes by revision number.
1790 # Sort the manifestnodes by revision number.
1820 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1791 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1821 # Create a generator for the manifestnodes that calls our lookup
1792 # Create a generator for the manifestnodes that calls our lookup
1822 # and data collection functions back.
1793 # and data collection functions back.
1823 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1794 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1824 filenode_collector(changedfiles))
1795 filenode_collector(changedfiles))
1825 for chnk in group:
1796 for chnk in group:
1826 yield chnk
1797 yield chnk
1827
1798
1828 # These are no longer needed; dereference them and toss the
1799 # These are no longer needed; dereference them and toss the
1829 # memory.
1800 # memory.
1830 msng_mnfst_lst = None
1801 msng_mnfst_lst = None
1831 msng_mnfst_set.clear()
1802 msng_mnfst_set.clear()
1832
1803
1833 if extranodes:
1804 if extranodes:
1834 for fname in extranodes:
1805 for fname in extranodes:
1835 if isinstance(fname, int):
1806 if isinstance(fname, int):
1836 continue
1807 continue
1837 msng_filenode_set.setdefault(fname, {})
1808 msng_filenode_set.setdefault(fname, {})
1838 changedfiles[fname] = 1
1809 changedfiles[fname] = 1
1839 # Go through all our files in order sorted by name.
1810 # Go through all our files in order sorted by name.
1840 for fname in sorted(changedfiles):
1811 for fname in sorted(changedfiles):
1841 filerevlog = self.file(fname)
1812 filerevlog = self.file(fname)
1842 if not len(filerevlog):
1813 if not len(filerevlog):
1843 raise util.Abort(_("empty or missing revlog for %s") % fname)
1814 raise util.Abort(_("empty or missing revlog for %s") % fname)
1844 # Toss out the filenodes that the recipient isn't really
1815 # Toss out the filenodes that the recipient isn't really
1845 # missing.
1816 # missing.
1846 if fname in msng_filenode_set:
1817 if fname in msng_filenode_set:
1847 prune_filenodes(fname, filerevlog)
1818 prune_filenodes(fname, filerevlog)
1848 add_extra_nodes(fname, msng_filenode_set[fname])
1819 add_extra_nodes(fname, msng_filenode_set[fname])
1849 msng_filenode_lst = msng_filenode_set[fname].keys()
1820 msng_filenode_lst = msng_filenode_set[fname].keys()
1850 else:
1821 else:
1851 msng_filenode_lst = []
1822 msng_filenode_lst = []
1852 # If any filenodes are left, generate the group for them,
1823 # If any filenodes are left, generate the group for them,
1853 # otherwise don't bother.
1824 # otherwise don't bother.
1854 if len(msng_filenode_lst) > 0:
1825 if len(msng_filenode_lst) > 0:
1855 yield changegroup.chunkheader(len(fname))
1826 yield changegroup.chunkheader(len(fname))
1856 yield fname
1827 yield fname
1857 # Sort the filenodes by their revision #
1828 # Sort the filenodes by their revision #
1858 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1829 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1859 # Create a group generator and only pass in a changenode
1830 # Create a group generator and only pass in a changenode
1860 # lookup function, since we don't need to collect any
1831 # lookup function, since we don't need to collect any
1861 # information from filenodes.
1832 # information from filenodes.
1862 group = filerevlog.group(msng_filenode_lst,
1833 group = filerevlog.group(msng_filenode_lst,
1863 lookup_filenode_link_func(fname))
1834 lookup_filenode_link_func(fname))
1864 for chnk in group:
1835 for chnk in group:
1865 yield chnk
1836 yield chnk
1866 if fname in msng_filenode_set:
1837 if fname in msng_filenode_set:
1867 # Don't need this anymore, toss it to free memory.
1838 # Don't need this anymore, toss it to free memory.
1868 del msng_filenode_set[fname]
1839 del msng_filenode_set[fname]
1869 # Signal that no more groups are left.
1840 # Signal that no more groups are left.
1870 yield changegroup.closechunk()
1841 yield changegroup.closechunk()
1871
1842
1872 if msng_cl_lst:
1843 if msng_cl_lst:
1873 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1844 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1874
1845
1875 return util.chunkbuffer(gengroup())
1846 return util.chunkbuffer(gengroup())
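For orientation, the stream produced by gengroup() above is laid out as: the changelog group, the manifest group, then one group per changed file prefixed by a chunkheader and the file name, and a final closing chunk. The toy generator below (with string stand-ins for chunkheader() and closechunk(), and made-up chunk names) mirrors only that ordering:

    def toy_gengroup(cl_chunks, mnfst_chunks, file_chunks):
        # file_chunks: dict mapping filename -> list of chunk strings
        for chnk in cl_chunks:              # 1. changelog group
            yield chnk
        for chnk in mnfst_chunks:           # 2. manifest group
            yield chnk
        for fname in sorted(file_chunks):   # 3. one group per changed file
            yield "header(%d)" % len(fname) #    stand-in for chunkheader()
            yield fname
            for chnk in file_chunks[fname]:
                yield chnk
        yield "close"                       # 4. stand-in for closechunk()

    print list(toy_gengroup(["c1"], ["m1"], {"foo": ["f1", "f2"]}))
    # ['c1', 'm1', 'header(3)', 'foo', 'f1', 'f2', 'close']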
1876
1847
1877 def changegroup(self, basenodes, source):
1848 def changegroup(self, basenodes, source):
1878 # to avoid a race we use changegroupsubset() (issue1320)
1849 # to avoid a race we use changegroupsubset() (issue1320)
1879 return self.changegroupsubset(basenodes, self.heads(), source)
1850 return self.changegroupsubset(basenodes, self.heads(), source)
1880
1851
1881 def _changegroup(self, common, source):
1852 def _changegroup(self, common, source):
1882 """Generate a changegroup of all nodes that we have that a recipient
1853 """Generate a changegroup of all nodes that we have that a recipient
1883 doesn't.
1854 doesn't.
1884
1855
1885 This is much easier than the previous function as we can assume that
1856 This is much easier than the previous function as we can assume that
1886 the recipient has any changenode we aren't sending them.
1857 the recipient has any changenode we aren't sending them.
1887
1858
1888 common is the set of common nodes between remote and self"""
1859 common is the set of common nodes between remote and self"""
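Conceptually, the 'missing' set that cl.findmissing(common) returns below is everything in this repository that is not an ancestor of a common node; a toy sketch of that set arithmetic (with a made-up child-to-parents map, not Mercurial's implementation):

    parents = {"a": [], "b": ["a"], "c": ["b"], "d": ["b"]}   # heads: c and d

    def ancestors(nodes):
        seen, stack = set(), list(nodes)
        while stack:
            n = stack.pop()
            if n not in seen:
                seen.add(n)
                stack.extend(parents[n])
        return seen

    ours = ancestors(["c", "d"])       # every node we have
    common = ["b"]                     # known to exist on both sides
    missing = ours - ancestors(common)
    print sorted(missing)              # ['c', 'd'] -- what the recipient lacks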
1889
1860
1890 self.hook('preoutgoing', throw=True, source=source)
1861 self.hook('preoutgoing', throw=True, source=source)
1891
1862
1892 cl = self.changelog
1863 cl = self.changelog
1893 nodes = cl.findmissing(common)
1864 nodes = cl.findmissing(common)
1894 revset = set([cl.rev(n) for n in nodes])
1865 revset = set([cl.rev(n) for n in nodes])
1895 self.changegroupinfo(nodes, source)
1866 self.changegroupinfo(nodes, source)
1896
1867
1897 def identity(x):
1868 def identity(x):
1898 return x
1869 return x
1899
1870
1900 def gennodelst(log):
1871 def gennodelst(log):
1901 for r in log:
1872 for r in log:
1902 if log.linkrev(r) in revset:
1873 if log.linkrev(r) in revset:
1903 yield log.node(r)
1874 yield log.node(r)
1904
1875
1905 def changed_file_collector(changedfileset):
1876 def changed_file_collector(changedfileset):
1906 def collect_changed_files(clnode):
1877 def collect_changed_files(clnode):
1907 c = cl.read(clnode)
1878 c = cl.read(clnode)
1908 for fname in c[3]:
1879 for fname in c[3]:
1909 changedfileset[fname] = 1
1880 changedfileset[fname] = 1
1910 return collect_changed_files
1881 return collect_changed_files
1911
1882
1912 def lookuprevlink_func(revlog):
1883 def lookuprevlink_func(revlog):
1913 def lookuprevlink(n):
1884 def lookuprevlink(n):
1914 return cl.node(revlog.linkrev(revlog.rev(n)))
1885 return cl.node(revlog.linkrev(revlog.rev(n)))
1915 return lookuprevlink
1886 return lookuprevlink
1916
1887
1917 def gengroup():
1888 def gengroup():
1918 # construct a list of all changed files
1889 # construct a list of all changed files
1919 changedfiles = {}
1890 changedfiles = {}
1920
1891
1921 for chnk in cl.group(nodes, identity,
1892 for chnk in cl.group(nodes, identity,
1922 changed_file_collector(changedfiles)):
1893 changed_file_collector(changedfiles)):
1923 yield chnk
1894 yield chnk
1924
1895
1925 mnfst = self.manifest
1896 mnfst = self.manifest
1926 nodeiter = gennodelst(mnfst)
1897 nodeiter = gennodelst(mnfst)
1927 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1898 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1928 yield chnk
1899 yield chnk
1929
1900
1930 for fname in sorted(changedfiles):
1901 for fname in sorted(changedfiles):
1931 filerevlog = self.file(fname)
1902 filerevlog = self.file(fname)
1932 if not len(filerevlog):
1903 if not len(filerevlog):
1933 raise util.Abort(_("empty or missing revlog for %s") % fname)
1904 raise util.Abort(_("empty or missing revlog for %s") % fname)
1934 nodeiter = gennodelst(filerevlog)
1905 nodeiter = gennodelst(filerevlog)
1935 nodeiter = list(nodeiter)
1906 nodeiter = list(nodeiter)
1936 if nodeiter:
1907 if nodeiter:
1937 yield changegroup.chunkheader(len(fname))
1908 yield changegroup.chunkheader(len(fname))
1938 yield fname
1909 yield fname
1939 lookup = lookuprevlink_func(filerevlog)
1910 lookup = lookuprevlink_func(filerevlog)
1940 for chnk in filerevlog.group(nodeiter, lookup):
1911 for chnk in filerevlog.group(nodeiter, lookup):
1941 yield chnk
1912 yield chnk
1942
1913
1943 yield changegroup.closechunk()
1914 yield changegroup.closechunk()
1944
1915
1945 if nodes:
1916 if nodes:
1946 self.hook('outgoing', node=hex(nodes[0]), source=source)
1917 self.hook('outgoing', node=hex(nodes[0]), source=source)
1947
1918
1948 return util.chunkbuffer(gengroup())
1919 return util.chunkbuffer(gengroup())
1949
1920
1950 def addchangegroup(self, source, srctype, url, emptyok=False):
1921 def addchangegroup(self, source, srctype, url, emptyok=False):
1951 """add changegroup to repo.
1922 """add changegroup to repo.
1952
1923
1953 return values:
1924 return values:
1954 - nothing changed or no source: 0
1925 - nothing changed or no source: 0
1955 - more heads than before: 1+added heads (2..n)
1926 - more heads than before: 1+added heads (2..n)
1956 - fewer heads than before: -1-removed heads (-2..-n)
1927 - fewer heads than before: -1-removed heads (-2..-n)
1957 - number of heads stays the same: 1
1928 - number of heads stays the same: 1
1958 """
1929 """
1959 def csmap(x):
1930 def csmap(x):
1960 self.ui.debug(_("add changeset %s\n") % short(x))
1931 self.ui.debug(_("add changeset %s\n") % short(x))
1961 return len(cl)
1932 return len(cl)
1962
1933
1963 def revmap(x):
1934 def revmap(x):
1964 return cl.rev(x)
1935 return cl.rev(x)
1965
1936
1966 if not source:
1937 if not source:
1967 return 0
1938 return 0
1968
1939
1969 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1940 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1970
1941
1971 changesets = files = revisions = 0
1942 changesets = files = revisions = 0
1972
1943
1973 # write changelog data to temp files so concurrent readers will not see
1944 # write changelog data to temp files so concurrent readers will not see
1974 # an inconsistent view
1945 # an inconsistent view
1975 cl = self.changelog
1946 cl = self.changelog
1976 cl.delayupdate()
1947 cl.delayupdate()
1977 oldheads = len(cl.heads())
1948 oldheads = len(cl.heads())
1978
1949
1979 tr = self.transaction()
1950 tr = self.transaction()
1980 try:
1951 try:
1981 trp = weakref.proxy(tr)
1952 trp = weakref.proxy(tr)
1982 # pull off the changeset group
1953 # pull off the changeset group
1983 self.ui.status(_("adding changesets\n"))
1954 self.ui.status(_("adding changesets\n"))
1984 clstart = len(cl)
1955 clstart = len(cl)
1985 chunkiter = changegroup.chunkiter(source)
1956 chunkiter = changegroup.chunkiter(source)
1986 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1957 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1987 raise util.Abort(_("received changelog group is empty"))
1958 raise util.Abort(_("received changelog group is empty"))
1988 clend = len(cl)
1959 clend = len(cl)
1989 changesets = clend - clstart
1960 changesets = clend - clstart
1990
1961
1991 # pull off the manifest group
1962 # pull off the manifest group
1992 self.ui.status(_("adding manifests\n"))
1963 self.ui.status(_("adding manifests\n"))
1993 chunkiter = changegroup.chunkiter(source)
1964 chunkiter = changegroup.chunkiter(source)
1994 # no need to check for empty manifest group here:
1965 # no need to check for empty manifest group here:
1995 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1966 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1996 # no new manifest will be created and the manifest group will
1967 # no new manifest will be created and the manifest group will
1997 # be empty during the pull
1968 # be empty during the pull
1998 self.manifest.addgroup(chunkiter, revmap, trp)
1969 self.manifest.addgroup(chunkiter, revmap, trp)
1999
1970
2000 # process the files
1971 # process the files
2001 self.ui.status(_("adding file changes\n"))
1972 self.ui.status(_("adding file changes\n"))
2002 while 1:
1973 while 1:
2003 f = changegroup.getchunk(source)
1974 f = changegroup.getchunk(source)
2004 if not f:
1975 if not f:
2005 break
1976 break
2006 self.ui.debug(_("adding %s revisions\n") % f)
1977 self.ui.debug(_("adding %s revisions\n") % f)
2007 fl = self.file(f)
1978 fl = self.file(f)
2008 o = len(fl)
1979 o = len(fl)
2009 chunkiter = changegroup.chunkiter(source)
1980 chunkiter = changegroup.chunkiter(source)
2010 if fl.addgroup(chunkiter, revmap, trp) is None:
1981 if fl.addgroup(chunkiter, revmap, trp) is None:
2011 raise util.Abort(_("received file revlog group is empty"))
1982 raise util.Abort(_("received file revlog group is empty"))
2012 revisions += len(fl) - o
1983 revisions += len(fl) - o
2013 files += 1
1984 files += 1
2014
1985
2015 newheads = len(cl.heads())
1986 newheads = len(cl.heads())
2016 heads = ""
1987 heads = ""
2017 if oldheads and newheads != oldheads:
1988 if oldheads and newheads != oldheads:
2018 heads = _(" (%+d heads)") % (newheads - oldheads)
1989 heads = _(" (%+d heads)") % (newheads - oldheads)
2019
1990
2020 self.ui.status(_("added %d changesets"
1991 self.ui.status(_("added %d changesets"
2021 " with %d changes to %d files%s\n")
1992 " with %d changes to %d files%s\n")
2022 % (changesets, revisions, files, heads))
1993 % (changesets, revisions, files, heads))
2023
1994
2024 if changesets > 0:
1995 if changesets > 0:
2025 p = lambda: cl.writepending() and self.root or ""
1996 p = lambda: cl.writepending() and self.root or ""
2026 self.hook('pretxnchangegroup', throw=True,
1997 self.hook('pretxnchangegroup', throw=True,
2027 node=hex(cl.node(clstart)), source=srctype,
1998 node=hex(cl.node(clstart)), source=srctype,
2028 url=url, pending=p)
1999 url=url, pending=p)
2029
2000
2030 # make changelog see real files again
2001 # make changelog see real files again
2031 cl.finalize(trp)
2002 cl.finalize(trp)
2032
2003
2033 tr.close()
2004 tr.close()
2034 finally:
2005 finally:
2035 del tr
2006 del tr
2036
2007
2037 if changesets > 0:
2008 if changesets > 0:
2038 # forcefully update the on-disk branch cache
2009 # forcefully update the on-disk branch cache
2039 self.ui.debug(_("updating the branch cache\n"))
2010 self.ui.debug(_("updating the branch cache\n"))
2040 self.branchtags()
2011 self.branchtags()
2041 self.hook("changegroup", node=hex(cl.node(clstart)),
2012 self.hook("changegroup", node=hex(cl.node(clstart)),
2042 source=srctype, url=url)
2013 source=srctype, url=url)
2043
2014
2044 for i in xrange(clstart, clend):
2015 for i in xrange(clstart, clend):
2045 self.hook("incoming", node=hex(cl.node(i)),
2016 self.hook("incoming", node=hex(cl.node(i)),
2046 source=srctype, url=url)
2017 source=srctype, url=url)
2047
2018
2048 # never return 0 here:
2019 # never return 0 here:
2049 if newheads < oldheads:
2020 if newheads < oldheads:
2050 return newheads - oldheads - 1
2021 return newheads - oldheads - 1
2051 else:
2022 else:
2052 return newheads - oldheads + 1
2023 return newheads - oldheads + 1
2053
2024
2054
2025
2055 def stream_in(self, remote):
2026 def stream_in(self, remote):
2056 fp = remote.stream_out()
2027 fp = remote.stream_out()
2057 l = fp.readline()
2028 l = fp.readline()
2058 try:
2029 try:
2059 resp = int(l)
2030 resp = int(l)
2060 except ValueError:
2031 except ValueError:
2061 raise error.ResponseError(
2032 raise error.ResponseError(
2062 _('Unexpected response from remote server:'), l)
2033 _('Unexpected response from remote server:'), l)
2063 if resp == 1:
2034 if resp == 1:
2064 raise util.Abort(_('operation forbidden by server'))
2035 raise util.Abort(_('operation forbidden by server'))
2065 elif resp == 2:
2036 elif resp == 2:
2066 raise util.Abort(_('locking the remote repository failed'))
2037 raise util.Abort(_('locking the remote repository failed'))
2067 elif resp != 0:
2038 elif resp != 0:
2068 raise util.Abort(_('the server sent an unknown error code'))
2039 raise util.Abort(_('the server sent an unknown error code'))
2069 self.ui.status(_('streaming all changes\n'))
2040 self.ui.status(_('streaming all changes\n'))
2070 l = fp.readline()
2041 l = fp.readline()
2071 try:
2042 try:
2072 total_files, total_bytes = map(int, l.split(' ', 1))
2043 total_files, total_bytes = map(int, l.split(' ', 1))
2073 except (ValueError, TypeError):
2044 except (ValueError, TypeError):
2074 raise error.ResponseError(
2045 raise error.ResponseError(
2075 _('Unexpected response from remote server:'), l)
2046 _('Unexpected response from remote server:'), l)
2076 self.ui.status(_('%d files to transfer, %s of data\n') %
2047 self.ui.status(_('%d files to transfer, %s of data\n') %
2077 (total_files, util.bytecount(total_bytes)))
2048 (total_files, util.bytecount(total_bytes)))
2078 start = time.time()
2049 start = time.time()
2079 for i in xrange(total_files):
2050 for i in xrange(total_files):
2080 # XXX doesn't support '\n' or '\r' in filenames
2051 # XXX doesn't support '\n' or '\r' in filenames
2081 l = fp.readline()
2052 l = fp.readline()
2082 try:
2053 try:
2083 name, size = l.split('\0', 1)
2054 name, size = l.split('\0', 1)
2084 size = int(size)
2055 size = int(size)
2085 except (ValueError, TypeError):
2056 except (ValueError, TypeError):
2086 raise error.ResponseError(
2057 raise error.ResponseError(
2087 _('Unexpected response from remote server:'), l)
2058 _('Unexpected response from remote server:'), l)
2088 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2059 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2089 ofp = self.sopener(name, 'w')
2060 ofp = self.sopener(name, 'w')
2090 for chunk in util.filechunkiter(fp, limit=size):
2061 for chunk in util.filechunkiter(fp, limit=size):
2091 ofp.write(chunk)
2062 ofp.write(chunk)
2092 ofp.close()
2063 ofp.close()
2093 elapsed = time.time() - start
2064 elapsed = time.time() - start
2094 if elapsed <= 0:
2065 if elapsed <= 0:
2095 elapsed = 0.001
2066 elapsed = 0.001
2096 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2067 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2097 (util.bytecount(total_bytes), elapsed,
2068 (util.bytecount(total_bytes), elapsed,
2098 util.bytecount(total_bytes / elapsed)))
2069 util.bytecount(total_bytes / elapsed)))
2099 self.invalidate()
2070 self.invalidate()
2100 return len(self.heads()) + 1
2071 return len(self.heads()) + 1
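The loop above expects a very simple wire format: a numeric status line (0 for success), a 'total_files total_bytes' line, then for each file a 'name\0size' header followed by exactly size raw bytes. The sketch below assembles such a payload by hand purely for illustration; it is not the server-side implementation, and the file names and contents are made up:

    from cStringIO import StringIO

    files = [("data/foo.i", "revlog bytes here"), ("00changelog.i", "more bytes")]
    total_bytes = sum(len(data) for _, data in files)

    out = StringIO()
    out.write("0\n")                                  # status: request accepted
    out.write("%d %d\n" % (len(files), total_bytes))  # summary line
    for name, data in files:
        out.write("%s\0%d\n" % (name, len(data)))     # per-file header
        out.write(data)                               # raw file contents
    payload = out.getvalue()
    # A client like stream_in() readline()s the status and summary, then
    # alternates between reading a header and consuming `size` bytes.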
2101
2072
2102 def clone(self, remote, heads=[], stream=False):
2073 def clone(self, remote, heads=[], stream=False):
2103 '''clone remote repository.
2074 '''clone remote repository.
2104
2075
2105 keyword arguments:
2076 keyword arguments:
2106 heads: list of revs to clone (forces use of pull)
2077 heads: list of revs to clone (forces use of pull)
2107 stream: use streaming clone if possible'''
2078 stream: use streaming clone if possible'''
2108
2079
2109 # now, all clients that can request uncompressed clones can
2080 # now, all clients that can request uncompressed clones can
2110 # read repo formats supported by all servers that can serve
2081 # read repo formats supported by all servers that can serve
2111 # them.
2082 # them.
2112
2083
2113 # if revlog format changes, client will have to check version
2084 # if revlog format changes, client will have to check version
2114 # and format flags on "stream" capability, and use
2085 # and format flags on "stream" capability, and use
2115 # uncompressed only if compatible.
2086 # uncompressed only if compatible.
2116
2087
2117 if stream and not heads and remote.capable('stream'):
2088 if stream and not heads and remote.capable('stream'):
2118 return self.stream_in(remote)
2089 return self.stream_in(remote)
2119 return self.pull(remote, heads)
2090 return self.pull(remote, heads)
2120
2091
2121 # used to avoid circular references so destructors work
2092 # used to avoid circular references so destructors work
2122 def aftertrans(files):
2093 def aftertrans(files):
2123 renamefiles = [tuple(t) for t in files]
2094 renamefiles = [tuple(t) for t in files]
2124 def a():
2095 def a():
2125 for src, dest in renamefiles:
2096 for src, dest in renamefiles:
2126 util.rename(src, dest)
2097 util.rename(src, dest)
2127 return a
2098 return a
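A small usage sketch of the helper above, with os.rename standing in for util.rename and made-up file names; the returned callable simply performs the queued renames when it is eventually invoked (for example once a transaction closes):

    import os, tempfile

    def aftertrans(files):
        renamefiles = [tuple(t) for t in files]
        def a():
            for src, dest in renamefiles:
                os.rename(src, dest)          # util.rename in the real code
        return a

    d = tempfile.mkdtemp()
    src = os.path.join(d, "journal")
    open(src, "w").close()
    post = aftertrans([(src, os.path.join(d, "undo"))])
    post()                                    # run the deferred renames
    print os.listdir(d)                       # ['undo']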
2128
2099
2129 def instance(ui, path, create):
2100 def instance(ui, path, create):
2130 return localrepository(ui, util.drop_scheme('file', path), create)
2101 return localrepository(ui, util.drop_scheme('file', path), create)
2131
2102
2132 def islocal(path):
2103 def islocal(path):
2133 return True
2104 return True