url: move URL parsing functions into util to improve startup time...
Author: Brodie Rao
Changeset: r14076:924c8215 (branch: default)

The full diff is too large to display, so the content below is truncated.
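The practical upshot for the extensions touched below is a one-for-one rename at the call sites: helpers such as hidepassword and removeauth, previously imported from mercurial.url, are now reached through mercurial.util, so the url import can be dropped entirely. A minimal before/after sketch of that call-site change (the example URL is an illustrative placeholder; this targets the Mercurial 1.8-era API shown in the diff):

    # Before this changeset: URL helpers lived in mercurial.url.
    from mercurial import url
    print(url.hidepassword('http://user:secret@example.com/repo'))

    # After this changeset: the same helpers are exposed on mercurial.util,
    # so extensions no longer import (and therefore no longer load) url.py
    # at startup, which is where the startup-time saving comes from.
    from mercurial import util
    print(util.hidepassword('http://user:secret@example.com/repo'))

The fetch.py and patchbomb.py hunks below apply exactly this substitution (hidepassword and removeauth); schemes.py additionally switches its hasdriveletter wrapper from the url module to util.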

@@ -1,152 +1,152 @@
1 # fetch.py - pull and merge remote changes
1 # fetch.py - pull and merge remote changes
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''pull, update and merge in one command'''
8 '''pull, update and merge in one command'''
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial.node import nullid, short
11 from mercurial.node import nullid, short
12 from mercurial import commands, cmdutil, hg, util, url, error
12 from mercurial import commands, cmdutil, hg, util, error
13 from mercurial.lock import release
13 from mercurial.lock import release
14
14
15 def fetch(ui, repo, source='default', **opts):
15 def fetch(ui, repo, source='default', **opts):
16 '''pull changes from a remote repository, merge new changes if needed.
16 '''pull changes from a remote repository, merge new changes if needed.
17
17
18 This finds all changes from the repository at the specified path
18 This finds all changes from the repository at the specified path
19 or URL and adds them to the local repository.
19 or URL and adds them to the local repository.
20
20
21 If the pulled changes add a new branch head, the head is
21 If the pulled changes add a new branch head, the head is
22 automatically merged, and the result of the merge is committed.
22 automatically merged, and the result of the merge is committed.
23 Otherwise, the working directory is updated to include the new
23 Otherwise, the working directory is updated to include the new
24 changes.
24 changes.
25
25
26 When a merge occurs, the newly pulled changes are assumed to be
26 When a merge occurs, the newly pulled changes are assumed to be
27 "authoritative". The head of the new changes is used as the first
27 "authoritative". The head of the new changes is used as the first
28 parent, with local changes as the second. To switch the merge
28 parent, with local changes as the second. To switch the merge
29 order, use --switch-parent.
29 order, use --switch-parent.
30
30
31 See :hg:`help dates` for a list of formats valid for -d/--date.
31 See :hg:`help dates` for a list of formats valid for -d/--date.
32
32
33 Returns 0 on success.
33 Returns 0 on success.
34 '''
34 '''
35
35
36 date = opts.get('date')
36 date = opts.get('date')
37 if date:
37 if date:
38 opts['date'] = util.parsedate(date)
38 opts['date'] = util.parsedate(date)
39
39
40 parent, p2 = repo.dirstate.parents()
40 parent, p2 = repo.dirstate.parents()
41 branch = repo.dirstate.branch()
41 branch = repo.dirstate.branch()
42 branchnode = repo.branchtags().get(branch)
42 branchnode = repo.branchtags().get(branch)
43 if parent != branchnode:
43 if parent != branchnode:
44 raise util.Abort(_('working dir not at branch tip '
44 raise util.Abort(_('working dir not at branch tip '
45 '(use "hg update" to check out branch tip)'))
45 '(use "hg update" to check out branch tip)'))
46
46
47 if p2 != nullid:
47 if p2 != nullid:
48 raise util.Abort(_('outstanding uncommitted merge'))
48 raise util.Abort(_('outstanding uncommitted merge'))
49
49
50 wlock = lock = None
50 wlock = lock = None
51 try:
51 try:
52 wlock = repo.wlock()
52 wlock = repo.wlock()
53 lock = repo.lock()
53 lock = repo.lock()
54 mod, add, rem, del_ = repo.status()[:4]
54 mod, add, rem, del_ = repo.status()[:4]
55
55
56 if mod or add or rem:
56 if mod or add or rem:
57 raise util.Abort(_('outstanding uncommitted changes'))
57 raise util.Abort(_('outstanding uncommitted changes'))
58 if del_:
58 if del_:
59 raise util.Abort(_('working directory is missing some files'))
59 raise util.Abort(_('working directory is missing some files'))
60 bheads = repo.branchheads(branch)
60 bheads = repo.branchheads(branch)
61 bheads = [head for head in bheads if len(repo[head].children()) == 0]
61 bheads = [head for head in bheads if len(repo[head].children()) == 0]
62 if len(bheads) > 1:
62 if len(bheads) > 1:
63 raise util.Abort(_('multiple heads in this branch '
63 raise util.Abort(_('multiple heads in this branch '
64 '(use "hg heads ." and "hg merge" to merge)'))
64 '(use "hg heads ." and "hg merge" to merge)'))
65
65
66 other = hg.repository(hg.remoteui(repo, opts),
66 other = hg.repository(hg.remoteui(repo, opts),
67 ui.expandpath(source))
67 ui.expandpath(source))
68 ui.status(_('pulling from %s\n') %
68 ui.status(_('pulling from %s\n') %
69 url.hidepassword(ui.expandpath(source)))
69 util.hidepassword(ui.expandpath(source)))
70 revs = None
70 revs = None
71 if opts['rev']:
71 if opts['rev']:
72 try:
72 try:
73 revs = [other.lookup(rev) for rev in opts['rev']]
73 revs = [other.lookup(rev) for rev in opts['rev']]
74 except error.CapabilityError:
74 except error.CapabilityError:
75 err = _("Other repository doesn't support revision lookup, "
75 err = _("Other repository doesn't support revision lookup, "
76 "so a rev cannot be specified.")
76 "so a rev cannot be specified.")
77 raise util.Abort(err)
77 raise util.Abort(err)
78
78
79 # Are there any changes at all?
79 # Are there any changes at all?
80 modheads = repo.pull(other, heads=revs)
80 modheads = repo.pull(other, heads=revs)
81 if modheads == 0:
81 if modheads == 0:
82 return 0
82 return 0
83
83
84 # Is this a simple fast-forward along the current branch?
84 # Is this a simple fast-forward along the current branch?
85 newheads = repo.branchheads(branch)
85 newheads = repo.branchheads(branch)
86 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
86 newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
87 if len(newheads) == 1:
87 if len(newheads) == 1:
88 if newchildren[0] != parent:
88 if newchildren[0] != parent:
89 return hg.clean(repo, newchildren[0])
89 return hg.clean(repo, newchildren[0])
90 else:
90 else:
91 return 0
91 return 0
92
92
93 # Are there more than one additional branch heads?
93 # Are there more than one additional branch heads?
94 newchildren = [n for n in newchildren if n != parent]
94 newchildren = [n for n in newchildren if n != parent]
95 newparent = parent
95 newparent = parent
96 if newchildren:
96 if newchildren:
97 newparent = newchildren[0]
97 newparent = newchildren[0]
98 hg.clean(repo, newparent)
98 hg.clean(repo, newparent)
99 newheads = [n for n in newheads if n != newparent]
99 newheads = [n for n in newheads if n != newparent]
100 if len(newheads) > 1:
100 if len(newheads) > 1:
101 ui.status(_('not merging with %d other new branch heads '
101 ui.status(_('not merging with %d other new branch heads '
102 '(use "hg heads ." and "hg merge" to merge them)\n') %
102 '(use "hg heads ." and "hg merge" to merge them)\n') %
103 (len(newheads) - 1))
103 (len(newheads) - 1))
104 return 1
104 return 1
105
105
106 # Otherwise, let's merge.
106 # Otherwise, let's merge.
107 err = False
107 err = False
108 if newheads:
108 if newheads:
109 # By default, we consider the repository we're pulling
109 # By default, we consider the repository we're pulling
110 # *from* as authoritative, so we merge our changes into
110 # *from* as authoritative, so we merge our changes into
111 # theirs.
111 # theirs.
112 if opts['switch_parent']:
112 if opts['switch_parent']:
113 firstparent, secondparent = newparent, newheads[0]
113 firstparent, secondparent = newparent, newheads[0]
114 else:
114 else:
115 firstparent, secondparent = newheads[0], newparent
115 firstparent, secondparent = newheads[0], newparent
116 ui.status(_('updating to %d:%s\n') %
116 ui.status(_('updating to %d:%s\n') %
117 (repo.changelog.rev(firstparent),
117 (repo.changelog.rev(firstparent),
118 short(firstparent)))
118 short(firstparent)))
119 hg.clean(repo, firstparent)
119 hg.clean(repo, firstparent)
120 ui.status(_('merging with %d:%s\n') %
120 ui.status(_('merging with %d:%s\n') %
121 (repo.changelog.rev(secondparent), short(secondparent)))
121 (repo.changelog.rev(secondparent), short(secondparent)))
122 err = hg.merge(repo, secondparent, remind=False)
122 err = hg.merge(repo, secondparent, remind=False)
123
123
124 if not err:
124 if not err:
125 # we don't translate commit messages
125 # we don't translate commit messages
126 message = (cmdutil.logmessage(opts) or
126 message = (cmdutil.logmessage(opts) or
127 ('Automated merge with %s' %
127 ('Automated merge with %s' %
128 url.removeauth(other.url())))
128 util.removeauth(other.url())))
129 editor = cmdutil.commiteditor
129 editor = cmdutil.commiteditor
130 if opts.get('force_editor') or opts.get('edit'):
130 if opts.get('force_editor') or opts.get('edit'):
131 editor = cmdutil.commitforceeditor
131 editor = cmdutil.commitforceeditor
132 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
132 n = repo.commit(message, opts['user'], opts['date'], editor=editor)
133 ui.status(_('new changeset %d:%s merges remote changes '
133 ui.status(_('new changeset %d:%s merges remote changes '
134 'with local\n') % (repo.changelog.rev(n),
134 'with local\n') % (repo.changelog.rev(n),
135 short(n)))
135 short(n)))
136
136
137 return err
137 return err
138
138
139 finally:
139 finally:
140 release(lock, wlock)
140 release(lock, wlock)
141
141
142 cmdtable = {
142 cmdtable = {
143 'fetch':
143 'fetch':
144 (fetch,
144 (fetch,
145 [('r', 'rev', [],
145 [('r', 'rev', [],
146 _('a specific revision you would like to pull'), _('REV')),
146 _('a specific revision you would like to pull'), _('REV')),
147 ('e', 'edit', None, _('edit commit message')),
147 ('e', 'edit', None, _('edit commit message')),
148 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
148 ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
149 ('', 'switch-parent', None, _('switch parents when merging')),
149 ('', 'switch-parent', None, _('switch parents when merging')),
150 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
150 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
151 _('hg fetch [SOURCE]')),
151 _('hg fetch [SOURCE]')),
152 }
152 }
@@ -1,562 +1,562 @@
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46 '''
46 '''
47
47
48 import os, errno, socket, tempfile, cStringIO, time
48 import os, errno, socket, tempfile, cStringIO, time
49 import email.MIMEMultipart, email.MIMEBase
49 import email.MIMEMultipart, email.MIMEBase
50 import email.Utils, email.Encoders, email.Generator
50 import email.Utils, email.Encoders, email.Generator
51 from mercurial import cmdutil, commands, hg, mail, patch, util, discovery, url
51 from mercurial import cmdutil, commands, hg, mail, patch, util, discovery
52 from mercurial.i18n import _
52 from mercurial.i18n import _
53 from mercurial.node import bin
53 from mercurial.node import bin
54
54
55 def prompt(ui, prompt, default=None, rest=':'):
55 def prompt(ui, prompt, default=None, rest=':'):
56 if not ui.interactive() and default is None:
56 if not ui.interactive() and default is None:
57 raise util.Abort(_("%s Please enter a valid value" % (prompt + rest)))
57 raise util.Abort(_("%s Please enter a valid value" % (prompt + rest)))
58 if default:
58 if default:
59 prompt += ' [%s]' % default
59 prompt += ' [%s]' % default
60 prompt += rest
60 prompt += rest
61 while True:
61 while True:
62 r = ui.prompt(prompt, default=default)
62 r = ui.prompt(prompt, default=default)
63 if r:
63 if r:
64 return r
64 return r
65 if default is not None:
65 if default is not None:
66 return default
66 return default
67 ui.warn(_('Please enter a valid value.\n'))
67 ui.warn(_('Please enter a valid value.\n'))
68
68
69 def introneeded(opts, number):
69 def introneeded(opts, number):
70 '''is an introductory message required?'''
70 '''is an introductory message required?'''
71 return number > 1 or opts.get('intro') or opts.get('desc')
71 return number > 1 or opts.get('intro') or opts.get('desc')
72
72
73 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total,
73 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total,
74 patchname=None):
74 patchname=None):
75
75
76 desc = []
76 desc = []
77 node = None
77 node = None
78 body = ''
78 body = ''
79
79
80 for line in patchlines:
80 for line in patchlines:
81 if line.startswith('#'):
81 if line.startswith('#'):
82 if line.startswith('# Node ID'):
82 if line.startswith('# Node ID'):
83 node = line.split()[-1]
83 node = line.split()[-1]
84 continue
84 continue
85 if line.startswith('diff -r') or line.startswith('diff --git'):
85 if line.startswith('diff -r') or line.startswith('diff --git'):
86 break
86 break
87 desc.append(line)
87 desc.append(line)
88
88
89 if not patchname and not node:
89 if not patchname and not node:
90 raise ValueError
90 raise ValueError
91
91
92 if opts.get('attach'):
92 if opts.get('attach'):
93 body = ('\n'.join(desc[1:]).strip() or
93 body = ('\n'.join(desc[1:]).strip() or
94 'Patch subject is complete summary.')
94 'Patch subject is complete summary.')
95 body += '\n\n\n'
95 body += '\n\n\n'
96
96
97 if opts.get('plain'):
97 if opts.get('plain'):
98 while patchlines and patchlines[0].startswith('# '):
98 while patchlines and patchlines[0].startswith('# '):
99 patchlines.pop(0)
99 patchlines.pop(0)
100 if patchlines:
100 if patchlines:
101 patchlines.pop(0)
101 patchlines.pop(0)
102 while patchlines and not patchlines[0].strip():
102 while patchlines and not patchlines[0].strip():
103 patchlines.pop(0)
103 patchlines.pop(0)
104
104
105 ds = patch.diffstat(patchlines)
105 ds = patch.diffstat(patchlines)
106 if opts.get('diffstat'):
106 if opts.get('diffstat'):
107 body += ds + '\n\n'
107 body += ds + '\n\n'
108
108
109 if opts.get('attach') or opts.get('inline'):
109 if opts.get('attach') or opts.get('inline'):
110 msg = email.MIMEMultipart.MIMEMultipart()
110 msg = email.MIMEMultipart.MIMEMultipart()
111 if body:
111 if body:
112 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
112 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
113 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
113 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
114 binnode = bin(node)
114 binnode = bin(node)
115 # if node is mq patch, it will have the patch file's name as a tag
115 # if node is mq patch, it will have the patch file's name as a tag
116 if not patchname:
116 if not patchname:
117 patchtags = [t for t in repo.nodetags(binnode)
117 patchtags = [t for t in repo.nodetags(binnode)
118 if t.endswith('.patch') or t.endswith('.diff')]
118 if t.endswith('.patch') or t.endswith('.diff')]
119 if patchtags:
119 if patchtags:
120 patchname = patchtags[0]
120 patchname = patchtags[0]
121 elif total > 1:
121 elif total > 1:
122 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
122 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
123 binnode, seqno=idx, total=total)
123 binnode, seqno=idx, total=total)
124 else:
124 else:
125 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
125 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
126 disposition = 'inline'
126 disposition = 'inline'
127 if opts.get('attach'):
127 if opts.get('attach'):
128 disposition = 'attachment'
128 disposition = 'attachment'
129 p['Content-Disposition'] = disposition + '; filename=' + patchname
129 p['Content-Disposition'] = disposition + '; filename=' + patchname
130 msg.attach(p)
130 msg.attach(p)
131 else:
131 else:
132 body += '\n'.join(patchlines)
132 body += '\n'.join(patchlines)
133 msg = mail.mimetextpatch(body, display=opts.get('test'))
133 msg = mail.mimetextpatch(body, display=opts.get('test'))
134
134
135 flag = ' '.join(opts.get('flag'))
135 flag = ' '.join(opts.get('flag'))
136 if flag:
136 if flag:
137 flag = ' ' + flag
137 flag = ' ' + flag
138
138
139 subj = desc[0].strip().rstrip('. ')
139 subj = desc[0].strip().rstrip('. ')
140 if not introneeded(opts, total):
140 if not introneeded(opts, total):
141 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
141 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
142 else:
142 else:
143 tlen = len(str(total))
143 tlen = len(str(total))
144 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
144 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
145 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
145 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
146 msg['X-Mercurial-Node'] = node
146 msg['X-Mercurial-Node'] = node
147 return msg, subj, ds
147 return msg, subj, ds
148
148
149 def patchbomb(ui, repo, *revs, **opts):
149 def patchbomb(ui, repo, *revs, **opts):
150 '''send changesets by email
150 '''send changesets by email
151
151
152 By default, diffs are sent in the format generated by
152 By default, diffs are sent in the format generated by
153 :hg:`export`, one per message. The series starts with a "[PATCH 0
153 :hg:`export`, one per message. The series starts with a "[PATCH 0
154 of N]" introduction, which describes the series as a whole.
154 of N]" introduction, which describes the series as a whole.
155
155
156 Each patch email has a Subject line of "[PATCH M of N] ...", using
156 Each patch email has a Subject line of "[PATCH M of N] ...", using
157 the first line of the changeset description as the subject text.
157 the first line of the changeset description as the subject text.
158 The message contains two or three parts. First, the changeset
158 The message contains two or three parts. First, the changeset
159 description.
159 description.
160
160
161 With the -d/--diffstat option, if the diffstat program is
161 With the -d/--diffstat option, if the diffstat program is
162 installed, the result of running diffstat on the patch is inserted.
162 installed, the result of running diffstat on the patch is inserted.
163
163
164 Finally, the patch itself, as generated by :hg:`export`.
164 Finally, the patch itself, as generated by :hg:`export`.
165
165
166 With the -d/--diffstat or -c/--confirm options, you will be presented
166 With the -d/--diffstat or -c/--confirm options, you will be presented
167 with a final summary of all messages and asked for confirmation before
167 with a final summary of all messages and asked for confirmation before
168 the messages are sent.
168 the messages are sent.
169
169
170 By default the patch is included as text in the email body for
170 By default the patch is included as text in the email body for
171 easy reviewing. Using the -a/--attach option will instead create
171 easy reviewing. Using the -a/--attach option will instead create
172 an attachment for the patch. With -i/--inline an inline attachment
172 an attachment for the patch. With -i/--inline an inline attachment
173 will be created.
173 will be created.
174
174
175 With -o/--outgoing, emails will be generated for patches not found
175 With -o/--outgoing, emails will be generated for patches not found
176 in the destination repository (or only those which are ancestors
176 in the destination repository (or only those which are ancestors
177 of the specified revisions if any are provided)
177 of the specified revisions if any are provided)
178
178
179 With -b/--bundle, changesets are selected as for --outgoing, but a
179 With -b/--bundle, changesets are selected as for --outgoing, but a
180 single email containing a binary Mercurial bundle as an attachment
180 single email containing a binary Mercurial bundle as an attachment
181 will be sent.
181 will be sent.
182
182
183 With -m/--mbox, instead of previewing each patchbomb message in a
183 With -m/--mbox, instead of previewing each patchbomb message in a
184 pager or sending the messages directly, it will create a UNIX
184 pager or sending the messages directly, it will create a UNIX
185 mailbox file with the patch emails. This mailbox file can be
185 mailbox file with the patch emails. This mailbox file can be
186 previewed with any mail user agent which supports UNIX mbox
186 previewed with any mail user agent which supports UNIX mbox
187 files.
187 files.
188
188
189 With -n/--test, all steps will run, but mail will not be sent.
189 With -n/--test, all steps will run, but mail will not be sent.
190 You will be prompted for an email recipient address, a subject and
190 You will be prompted for an email recipient address, a subject and
191 an introductory message describing the patches of your patchbomb.
191 an introductory message describing the patches of your patchbomb.
192 Then when all is done, patchbomb messages are displayed. If the
192 Then when all is done, patchbomb messages are displayed. If the
193 PAGER environment variable is set, your pager will be fired up once
193 PAGER environment variable is set, your pager will be fired up once
194 for each patchbomb message, so you can verify everything is alright.
194 for each patchbomb message, so you can verify everything is alright.
195
195
196 In case email sending fails, you will find a backup of your series
196 In case email sending fails, you will find a backup of your series
197 introductory message in ``.hg/last-email.txt``.
197 introductory message in ``.hg/last-email.txt``.
198
198
199 Examples::
199 Examples::
200
200
201 hg email -r 3000 # send patch 3000 only
201 hg email -r 3000 # send patch 3000 only
202 hg email -r 3000 -r 3001 # send patches 3000 and 3001
202 hg email -r 3000 -r 3001 # send patches 3000 and 3001
203 hg email -r 3000:3005 # send patches 3000 through 3005
203 hg email -r 3000:3005 # send patches 3000 through 3005
204 hg email 3000 # send patch 3000 (deprecated)
204 hg email 3000 # send patch 3000 (deprecated)
205
205
206 hg email -o # send all patches not in default
206 hg email -o # send all patches not in default
207 hg email -o DEST # send all patches not in DEST
207 hg email -o DEST # send all patches not in DEST
208 hg email -o -r 3000 # send all ancestors of 3000 not in default
208 hg email -o -r 3000 # send all ancestors of 3000 not in default
209 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
209 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
210
210
211 hg email -b # send bundle of all patches not in default
211 hg email -b # send bundle of all patches not in default
212 hg email -b DEST # send bundle of all patches not in DEST
212 hg email -b DEST # send bundle of all patches not in DEST
213 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
213 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
214 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
214 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
215
215
216 hg email -o -m mbox && # generate an mbox file...
216 hg email -o -m mbox && # generate an mbox file...
217 mutt -R -f mbox # ... and view it with mutt
217 mutt -R -f mbox # ... and view it with mutt
218 hg email -o -m mbox && # generate an mbox file ...
218 hg email -o -m mbox && # generate an mbox file ...
219 formail -s sendmail \\ # ... and use formail to send from the mbox
219 formail -s sendmail \\ # ... and use formail to send from the mbox
220 -bm -t < mbox # ... using sendmail
220 -bm -t < mbox # ... using sendmail
221
221
222 Before using this command, you will need to enable email in your
222 Before using this command, you will need to enable email in your
223 hgrc. See the [email] section in hgrc(5) for details.
223 hgrc. See the [email] section in hgrc(5) for details.
224 '''
224 '''
225
225
226 _charsets = mail._charsets(ui)
226 _charsets = mail._charsets(ui)
227
227
228 bundle = opts.get('bundle')
228 bundle = opts.get('bundle')
229 date = opts.get('date')
229 date = opts.get('date')
230 mbox = opts.get('mbox')
230 mbox = opts.get('mbox')
231 outgoing = opts.get('outgoing')
231 outgoing = opts.get('outgoing')
232 rev = opts.get('rev')
232 rev = opts.get('rev')
233 # internal option used by pbranches
233 # internal option used by pbranches
234 patches = opts.get('patches')
234 patches = opts.get('patches')
235
235
236 def getoutgoing(dest, revs):
236 def getoutgoing(dest, revs):
237 '''Return the revisions present locally but not in dest'''
237 '''Return the revisions present locally but not in dest'''
238 dest = ui.expandpath(dest or 'default-push', dest or 'default')
238 dest = ui.expandpath(dest or 'default-push', dest or 'default')
239 dest, branches = hg.parseurl(dest)
239 dest, branches = hg.parseurl(dest)
240 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
240 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
241 other = hg.repository(hg.remoteui(repo, opts), dest)
241 other = hg.repository(hg.remoteui(repo, opts), dest)
242 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
242 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
243 common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
243 common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
244 nodes = revs and map(repo.lookup, revs) or revs
244 nodes = revs and map(repo.lookup, revs) or revs
245 o = repo.changelog.findmissing(common, heads=nodes)
245 o = repo.changelog.findmissing(common, heads=nodes)
246 if not o:
246 if not o:
247 ui.status(_("no changes found\n"))
247 ui.status(_("no changes found\n"))
248 return []
248 return []
249 return [str(repo.changelog.rev(r)) for r in o]
249 return [str(repo.changelog.rev(r)) for r in o]
250
250
251 def getpatches(revs):
251 def getpatches(revs):
252 for r in cmdutil.revrange(repo, revs):
252 for r in cmdutil.revrange(repo, revs):
253 output = cStringIO.StringIO()
253 output = cStringIO.StringIO()
254 cmdutil.export(repo, [r], fp=output,
254 cmdutil.export(repo, [r], fp=output,
255 opts=patch.diffopts(ui, opts))
255 opts=patch.diffopts(ui, opts))
256 yield output.getvalue().split('\n')
256 yield output.getvalue().split('\n')
257
257
258 def getbundle(dest):
258 def getbundle(dest):
259 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
259 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
260 tmpfn = os.path.join(tmpdir, 'bundle')
260 tmpfn = os.path.join(tmpdir, 'bundle')
261 try:
261 try:
262 commands.bundle(ui, repo, tmpfn, dest, **opts)
262 commands.bundle(ui, repo, tmpfn, dest, **opts)
263 fp = open(tmpfn, 'rb')
263 fp = open(tmpfn, 'rb')
264 data = fp.read()
264 data = fp.read()
265 fp.close()
265 fp.close()
266 return data
266 return data
267 finally:
267 finally:
268 try:
268 try:
269 os.unlink(tmpfn)
269 os.unlink(tmpfn)
270 except:
270 except:
271 pass
271 pass
272 os.rmdir(tmpdir)
272 os.rmdir(tmpdir)
273
273
274 if not (opts.get('test') or mbox):
274 if not (opts.get('test') or mbox):
275 # really sending
275 # really sending
276 mail.validateconfig(ui)
276 mail.validateconfig(ui)
277
277
278 if not (revs or rev or outgoing or bundle or patches):
278 if not (revs or rev or outgoing or bundle or patches):
279 raise util.Abort(_('specify at least one changeset with -r or -o'))
279 raise util.Abort(_('specify at least one changeset with -r or -o'))
280
280
281 if outgoing and bundle:
281 if outgoing and bundle:
282 raise util.Abort(_("--outgoing mode always on with --bundle;"
282 raise util.Abort(_("--outgoing mode always on with --bundle;"
283 " do not re-specify --outgoing"))
283 " do not re-specify --outgoing"))
284
284
285 if outgoing or bundle:
285 if outgoing or bundle:
286 if len(revs) > 1:
286 if len(revs) > 1:
287 raise util.Abort(_("too many destinations"))
287 raise util.Abort(_("too many destinations"))
288 dest = revs and revs[0] or None
288 dest = revs and revs[0] or None
289 revs = []
289 revs = []
290
290
291 if rev:
291 if rev:
292 if revs:
292 if revs:
293 raise util.Abort(_('use only one form to specify the revision'))
293 raise util.Abort(_('use only one form to specify the revision'))
294 revs = rev
294 revs = rev
295
295
296 if outgoing:
296 if outgoing:
297 revs = getoutgoing(dest, rev)
297 revs = getoutgoing(dest, rev)
298 if bundle:
298 if bundle:
299 opts['revs'] = revs
299 opts['revs'] = revs
300
300
301 # start
301 # start
302 if date:
302 if date:
303 start_time = util.parsedate(date)
303 start_time = util.parsedate(date)
304 else:
304 else:
305 start_time = util.makedate()
305 start_time = util.makedate()
306
306
307 def genmsgid(id):
307 def genmsgid(id):
308 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
308 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
309
309
310 def getdescription(body, sender):
310 def getdescription(body, sender):
311 if opts.get('desc'):
311 if opts.get('desc'):
312 body = open(opts.get('desc')).read()
312 body = open(opts.get('desc')).read()
313 else:
313 else:
314 ui.write(_('\nWrite the introductory message for the '
314 ui.write(_('\nWrite the introductory message for the '
315 'patch series.\n\n'))
315 'patch series.\n\n'))
316 body = ui.edit(body, sender)
316 body = ui.edit(body, sender)
317 # Save serie description in case sendmail fails
317 # Save serie description in case sendmail fails
318 msgfile = repo.opener('last-email.txt', 'wb')
318 msgfile = repo.opener('last-email.txt', 'wb')
319 msgfile.write(body)
319 msgfile.write(body)
320 msgfile.close()
320 msgfile.close()
321 return body
321 return body
322
322
323 def getpatchmsgs(patches, patchnames=None):
323 def getpatchmsgs(patches, patchnames=None):
324 jumbo = []
324 jumbo = []
325 msgs = []
325 msgs = []
326
326
327 ui.write(_('This patch series consists of %d patches.\n\n')
327 ui.write(_('This patch series consists of %d patches.\n\n')
328 % len(patches))
328 % len(patches))
329
329
330 name = None
330 name = None
331 for i, p in enumerate(patches):
331 for i, p in enumerate(patches):
332 jumbo.extend(p)
332 jumbo.extend(p)
333 if patchnames:
333 if patchnames:
334 name = patchnames[i]
334 name = patchnames[i]
335 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
335 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
336 len(patches), name)
336 len(patches), name)
337 msgs.append(msg)
337 msgs.append(msg)
338
338
339 if introneeded(opts, len(patches)):
339 if introneeded(opts, len(patches)):
340 tlen = len(str(len(patches)))
340 tlen = len(str(len(patches)))
341
341
342 flag = ' '.join(opts.get('flag'))
342 flag = ' '.join(opts.get('flag'))
343 if flag:
343 if flag:
344 subj = '[PATCH %0*d of %d %s]' % (tlen, 0, len(patches), flag)
344 subj = '[PATCH %0*d of %d %s]' % (tlen, 0, len(patches), flag)
345 else:
345 else:
346 subj = '[PATCH %0*d of %d]' % (tlen, 0, len(patches))
346 subj = '[PATCH %0*d of %d]' % (tlen, 0, len(patches))
347 subj += ' ' + (opts.get('subject') or
347 subj += ' ' + (opts.get('subject') or
348 prompt(ui, 'Subject: ', rest=subj))
348 prompt(ui, 'Subject: ', rest=subj))
349
349
350 body = ''
350 body = ''
351 ds = patch.diffstat(jumbo)
351 ds = patch.diffstat(jumbo)
352 if ds and opts.get('diffstat'):
352 if ds and opts.get('diffstat'):
353 body = '\n' + ds
353 body = '\n' + ds
354
354
355 body = getdescription(body, sender)
355 body = getdescription(body, sender)
356 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
356 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
357 msg['Subject'] = mail.headencode(ui, subj, _charsets,
357 msg['Subject'] = mail.headencode(ui, subj, _charsets,
358 opts.get('test'))
358 opts.get('test'))
359
359
360 msgs.insert(0, (msg, subj, ds))
360 msgs.insert(0, (msg, subj, ds))
361 return msgs
361 return msgs
362
362
363 def getbundlemsgs(bundle):
363 def getbundlemsgs(bundle):
364 subj = (opts.get('subject')
364 subj = (opts.get('subject')
365 or prompt(ui, 'Subject:', 'A bundle for your repository'))
365 or prompt(ui, 'Subject:', 'A bundle for your repository'))
366
366
367 body = getdescription('', sender)
367 body = getdescription('', sender)
368 msg = email.MIMEMultipart.MIMEMultipart()
368 msg = email.MIMEMultipart.MIMEMultipart()
369 if body:
369 if body:
370 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
370 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
371 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
371 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
372 datapart.set_payload(bundle)
372 datapart.set_payload(bundle)
373 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
373 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
374 datapart.add_header('Content-Disposition', 'attachment',
374 datapart.add_header('Content-Disposition', 'attachment',
375 filename=bundlename)
375 filename=bundlename)
376 email.Encoders.encode_base64(datapart)
376 email.Encoders.encode_base64(datapart)
377 msg.attach(datapart)
377 msg.attach(datapart)
378 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
378 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
379 return [(msg, subj, None)]
379 return [(msg, subj, None)]
380
380
381 sender = (opts.get('from') or ui.config('email', 'from') or
381 sender = (opts.get('from') or ui.config('email', 'from') or
382 ui.config('patchbomb', 'from') or
382 ui.config('patchbomb', 'from') or
383 prompt(ui, 'From', ui.username()))
383 prompt(ui, 'From', ui.username()))
384
384
385 if patches:
385 if patches:
386 msgs = getpatchmsgs(patches, opts.get('patchnames'))
386 msgs = getpatchmsgs(patches, opts.get('patchnames'))
387 elif bundle:
387 elif bundle:
388 msgs = getbundlemsgs(getbundle(dest))
388 msgs = getbundlemsgs(getbundle(dest))
389 else:
389 else:
390 msgs = getpatchmsgs(list(getpatches(revs)))
390 msgs = getpatchmsgs(list(getpatches(revs)))
391
391
392 showaddrs = []
392 showaddrs = []
393
393
394 def getaddrs(opt, prpt=None, default=None):
394 def getaddrs(opt, prpt=None, default=None):
395 addrs = opts.get(opt.replace('-', '_'))
395 addrs = opts.get(opt.replace('-', '_'))
396 if opt != 'reply-to':
396 if opt != 'reply-to':
397 showaddr = '%s:' % opt.capitalize()
397 showaddr = '%s:' % opt.capitalize()
398 else:
398 else:
399 showaddr = 'Reply-To:'
399 showaddr = 'Reply-To:'
400
400
401 if addrs:
401 if addrs:
402 showaddrs.append('%s %s' % (showaddr, ', '.join(addrs)))
402 showaddrs.append('%s %s' % (showaddr, ', '.join(addrs)))
403 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
403 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
404
404
405 addrs = ui.config('email', opt) or ui.config('patchbomb', opt) or ''
405 addrs = ui.config('email', opt) or ui.config('patchbomb', opt) or ''
406 if not addrs and prpt:
406 if not addrs and prpt:
407 addrs = prompt(ui, prpt, default)
407 addrs = prompt(ui, prpt, default)
408
408
409 if addrs:
409 if addrs:
410 showaddrs.append('%s %s' % (showaddr, addrs))
410 showaddrs.append('%s %s' % (showaddr, addrs))
411 return mail.addrlistencode(ui, [addrs], _charsets, opts.get('test'))
411 return mail.addrlistencode(ui, [addrs], _charsets, opts.get('test'))
412
412
413 to = getaddrs('to', 'To')
413 to = getaddrs('to', 'To')
414 cc = getaddrs('cc', 'Cc', '')
414 cc = getaddrs('cc', 'Cc', '')
415 bcc = getaddrs('bcc')
415 bcc = getaddrs('bcc')
416 replyto = getaddrs('reply-to')
416 replyto = getaddrs('reply-to')
417
417
418 if opts.get('diffstat') or opts.get('confirm'):
418 if opts.get('diffstat') or opts.get('confirm'):
419 ui.write(_('\nFinal summary:\n\n'))
419 ui.write(_('\nFinal summary:\n\n'))
420 ui.write('From: %s\n' % sender)
420 ui.write('From: %s\n' % sender)
421 for addr in showaddrs:
421 for addr in showaddrs:
422 ui.write('%s\n' % addr)
422 ui.write('%s\n' % addr)
423 for m, subj, ds in msgs:
423 for m, subj, ds in msgs:
424 ui.write('Subject: %s\n' % subj)
424 ui.write('Subject: %s\n' % subj)
425 if ds:
425 if ds:
426 ui.write(ds)
426 ui.write(ds)
427 ui.write('\n')
427 ui.write('\n')
428 if ui.promptchoice(_('are you sure you want to send (yn)?'),
428 if ui.promptchoice(_('are you sure you want to send (yn)?'),
429 (_('&Yes'), _('&No'))):
429 (_('&Yes'), _('&No'))):
430 raise util.Abort(_('patchbomb canceled'))
430 raise util.Abort(_('patchbomb canceled'))
431
431
432 ui.write('\n')
432 ui.write('\n')
433
433
434 parent = opts.get('in_reply_to') or None
434 parent = opts.get('in_reply_to') or None
435 # angle brackets may be omitted, they're not semantically part of the msg-id
435 # angle brackets may be omitted, they're not semantically part of the msg-id
436 if parent is not None:
436 if parent is not None:
437 if not parent.startswith('<'):
437 if not parent.startswith('<'):
438 parent = '<' + parent
438 parent = '<' + parent
439 if not parent.endswith('>'):
439 if not parent.endswith('>'):
440 parent += '>'
440 parent += '>'
441
441
442 first = True
442 first = True
443
443
444 sender_addr = email.Utils.parseaddr(sender)[1]
444 sender_addr = email.Utils.parseaddr(sender)[1]
445 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
445 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
446 sendmail = None
446 sendmail = None
447 for i, (m, subj, ds) in enumerate(msgs):
447 for i, (m, subj, ds) in enumerate(msgs):
448 try:
448 try:
449 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
449 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
450 except TypeError:
450 except TypeError:
451 m['Message-Id'] = genmsgid('patchbomb')
451 m['Message-Id'] = genmsgid('patchbomb')
452 if parent:
452 if parent:
453 m['In-Reply-To'] = parent
453 m['In-Reply-To'] = parent
454 m['References'] = parent
454 m['References'] = parent
455 if first:
455 if first:
456 parent = m['Message-Id']
456 parent = m['Message-Id']
457 first = False
457 first = False
458
458
459 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
459 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
460 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
460 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
461
461
462 start_time = (start_time[0] + 1, start_time[1])
462 start_time = (start_time[0] + 1, start_time[1])
463 m['From'] = sender
463 m['From'] = sender
464 m['To'] = ', '.join(to)
464 m['To'] = ', '.join(to)
465 if cc:
465 if cc:
466 m['Cc'] = ', '.join(cc)
466 m['Cc'] = ', '.join(cc)
467 if bcc:
467 if bcc:
468 m['Bcc'] = ', '.join(bcc)
468 m['Bcc'] = ', '.join(bcc)
469 if replyto:
469 if replyto:
470 m['Reply-To'] = ', '.join(replyto)
470 m['Reply-To'] = ', '.join(replyto)
471 if opts.get('test'):
471 if opts.get('test'):
472 ui.status(_('Displaying '), subj, ' ...\n')
472 ui.status(_('Displaying '), subj, ' ...\n')
473 ui.flush()
473 ui.flush()
474 if 'PAGER' in os.environ and not ui.plain():
474 if 'PAGER' in os.environ and not ui.plain():
475 fp = util.popen(os.environ['PAGER'], 'w')
475 fp = util.popen(os.environ['PAGER'], 'w')
476 else:
476 else:
477 fp = ui
477 fp = ui
478 generator = email.Generator.Generator(fp, mangle_from_=False)
478 generator = email.Generator.Generator(fp, mangle_from_=False)
479 try:
479 try:
480 generator.flatten(m, 0)
480 generator.flatten(m, 0)
481 fp.write('\n')
481 fp.write('\n')
482 except IOError, inst:
482 except IOError, inst:
483 if inst.errno != errno.EPIPE:
483 if inst.errno != errno.EPIPE:
484 raise
484 raise
485 if fp is not ui:
485 if fp is not ui:
486 fp.close()
486 fp.close()
487 elif mbox:
487 elif mbox:
488 ui.status(_('Writing '), subj, ' ...\n')
488 ui.status(_('Writing '), subj, ' ...\n')
489 ui.progress(_('writing'), i, item=subj, total=len(msgs))
489 ui.progress(_('writing'), i, item=subj, total=len(msgs))
490 fp = open(mbox, 'In-Reply-To' in m and 'ab+' or 'wb+')
490 fp = open(mbox, 'In-Reply-To' in m and 'ab+' or 'wb+')
491 generator = email.Generator.Generator(fp, mangle_from_=True)
491 generator = email.Generator.Generator(fp, mangle_from_=True)
492 # Should be time.asctime(), but Windows prints 2-characters day
492 # Should be time.asctime(), but Windows prints 2-characters day
493 # of month instead of one. Make them print the same thing.
493 # of month instead of one. Make them print the same thing.
494 date = time.strftime('%a %b %d %H:%M:%S %Y',
494 date = time.strftime('%a %b %d %H:%M:%S %Y',
495 time.localtime(start_time[0]))
495 time.localtime(start_time[0]))
496 fp.write('From %s %s\n' % (sender_addr, date))
496 fp.write('From %s %s\n' % (sender_addr, date))
497 generator.flatten(m, 0)
497 generator.flatten(m, 0)
498 fp.write('\n\n')
498 fp.write('\n\n')
499 fp.close()
499 fp.close()
500 else:
500 else:
501 if not sendmail:
501 if not sendmail:
502 sendmail = mail.connect(ui)
502 sendmail = mail.connect(ui)
503 ui.status(_('Sending '), subj, ' ...\n')
503 ui.status(_('Sending '), subj, ' ...\n')
504 ui.progress(_('sending'), i, item=subj, total=len(msgs))
504 ui.progress(_('sending'), i, item=subj, total=len(msgs))
505 # Exim does not remove the Bcc field
505 # Exim does not remove the Bcc field
506 del m['Bcc']
506 del m['Bcc']
507 fp = cStringIO.StringIO()
507 fp = cStringIO.StringIO()
508 generator = email.Generator.Generator(fp, mangle_from_=False)
508 generator = email.Generator.Generator(fp, mangle_from_=False)
509 generator.flatten(m, 0)
509 generator.flatten(m, 0)
510 sendmail(sender, to + bcc + cc, fp.getvalue())
510 sendmail(sender, to + bcc + cc, fp.getvalue())
511
511
512 ui.progress(_('writing'), None)
512 ui.progress(_('writing'), None)
513 ui.progress(_('sending'), None)
513 ui.progress(_('sending'), None)
514
514
515 emailopts = [
515 emailopts = [
516 ('a', 'attach', None, _('send patches as attachments')),
516 ('a', 'attach', None, _('send patches as attachments')),
517 ('i', 'inline', None, _('send patches as inline attachments')),
517 ('i', 'inline', None, _('send patches as inline attachments')),
518 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
518 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
519 ('c', 'cc', [], _('email addresses of copy recipients')),
519 ('c', 'cc', [], _('email addresses of copy recipients')),
520 ('', 'confirm', None, _('ask for confirmation before sending')),
520 ('', 'confirm', None, _('ask for confirmation before sending')),
521 ('d', 'diffstat', None, _('add diffstat output to messages')),
521 ('d', 'diffstat', None, _('add diffstat output to messages')),
522 ('', 'date', '', _('use the given date as the sending date')),
522 ('', 'date', '', _('use the given date as the sending date')),
523 ('', 'desc', '', _('use the given file as the series description')),
523 ('', 'desc', '', _('use the given file as the series description')),
524 ('f', 'from', '', _('email address of sender')),
524 ('f', 'from', '', _('email address of sender')),
525 ('n', 'test', None, _('print messages that would be sent')),
525 ('n', 'test', None, _('print messages that would be sent')),
526 ('m', 'mbox', '',
526 ('m', 'mbox', '',
527 _('write messages to mbox file instead of sending them')),
527 _('write messages to mbox file instead of sending them')),
528 ('', 'reply-to', [], _('email addresses replies should be sent to')),
528 ('', 'reply-to', [], _('email addresses replies should be sent to')),
529 ('s', 'subject', '',
529 ('s', 'subject', '',
530 _('subject of first message (intro or single patch)')),
530 _('subject of first message (intro or single patch)')),
531 ('', 'in-reply-to', '',
531 ('', 'in-reply-to', '',
532 _('message identifier to reply to')),
532 _('message identifier to reply to')),
533 ('', 'flag', [], _('flags to add in subject prefixes')),
533 ('', 'flag', [], _('flags to add in subject prefixes')),
534 ('t', 'to', [], _('email addresses of recipients')),
534 ('t', 'to', [], _('email addresses of recipients')),
535 ]
535 ]
536
536
537
537
538 cmdtable = {
538 cmdtable = {
539 "email":
539 "email":
540 (patchbomb,
540 (patchbomb,
541 [('g', 'git', None, _('use git extended diff format')),
541 [('g', 'git', None, _('use git extended diff format')),
542 ('', 'plain', None, _('omit hg patch header')),
542 ('', 'plain', None, _('omit hg patch header')),
543 ('o', 'outgoing', None,
543 ('o', 'outgoing', None,
544 _('send changes not found in the target repository')),
544 _('send changes not found in the target repository')),
545 ('b', 'bundle', None,
545 ('b', 'bundle', None,
546 _('send changes not in target as a binary bundle')),
546 _('send changes not in target as a binary bundle')),
547 ('', 'bundlename', 'bundle',
547 ('', 'bundlename', 'bundle',
548 _('name of the bundle attachment file'), _('NAME')),
548 _('name of the bundle attachment file'), _('NAME')),
549 ('r', 'rev', [],
549 ('r', 'rev', [],
550 _('a revision to send'), _('REV')),
550 _('a revision to send'), _('REV')),
551 ('', 'force', None,
551 ('', 'force', None,
552 _('run even when remote repository is unrelated '
552 _('run even when remote repository is unrelated '
553 '(with -b/--bundle)')),
553 '(with -b/--bundle)')),
554 ('', 'base', [],
554 ('', 'base', [],
555 _('a base changeset to specify instead of a destination '
555 _('a base changeset to specify instead of a destination '
556 '(with -b/--bundle)'),
556 '(with -b/--bundle)'),
557 _('REV')),
557 _('REV')),
558 ('', 'intro', None,
558 ('', 'intro', None,
559 _('send an introduction email for a single patch')),
559 _('send an introduction email for a single patch')),
560 ] + emailopts + commands.remoteopts,
560 ] + emailopts + commands.remoteopts,
561 _('hg email [OPTION]... [DEST]...'))
561 _('hg email [OPTION]... [DEST]...'))
562 }
562 }
@@ -1,98 +1,98 @@
1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
1 # Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """extend schemes with shortcuts to repository swarms
6 """extend schemes with shortcuts to repository swarms
7
7
8 This extension allows you to specify shortcuts for parent URLs with a
8 This extension allows you to specify shortcuts for parent URLs with a
9 lot of repositories to act like a scheme, for example::
9 lot of repositories to act like a scheme, for example::
10
10
11 [schemes]
11 [schemes]
12 py = http://code.python.org/hg/
12 py = http://code.python.org/hg/
13
13
14 After that you can use it like::
14 After that you can use it like::
15
15
16 hg clone py://trunk/
16 hg clone py://trunk/
17
17
18 Additionally there is support for some more complex schemas, for
18 Additionally there is support for some more complex schemas, for
19 example used by Google Code::
19 example used by Google Code::
20
20
21 [schemes]
21 [schemes]
22 gcode = http://{1}.googlecode.com/hg/
22 gcode = http://{1}.googlecode.com/hg/
23
23
24 The syntax is taken from Mercurial templates, and you have unlimited
24 The syntax is taken from Mercurial templates, and you have unlimited
25 number of variables, starting with ``{1}`` and continuing with
25 number of variables, starting with ``{1}`` and continuing with
26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
26 ``{2}``, ``{3}`` and so on. This variables will receive parts of URL
27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
27 supplied, split by ``/``. Anything not specified as ``{part}`` will be
28 just appended to an URL.
28 just appended to an URL.
29
29
30 For convenience, the extension adds these schemes by default::
30 For convenience, the extension adds these schemes by default::
31
31
32 [schemes]
32 [schemes]
33 py = http://hg.python.org/
33 py = http://hg.python.org/
34 bb = https://bitbucket.org/
34 bb = https://bitbucket.org/
35 bb+ssh = ssh://hg@bitbucket.org/
35 bb+ssh = ssh://hg@bitbucket.org/
36 gcode = https://{1}.googlecode.com/hg/
36 gcode = https://{1}.googlecode.com/hg/
37 kiln = https://{1}.kilnhg.com/Repo/
37 kiln = https://{1}.kilnhg.com/Repo/
38
38
39 You can override a predefined scheme by defining a new scheme with the
39 You can override a predefined scheme by defining a new scheme with the
40 same name.
40 same name.
41 """
41 """
42
42
43 import os, re
43 import os, re
44 from mercurial import extensions, hg, templater, url as urlmod, util
44 from mercurial import extensions, hg, templater, util
45 from mercurial.i18n import _
45 from mercurial.i18n import _
46
46
47
47
48 class ShortRepository(object):
48 class ShortRepository(object):
49 def __init__(self, url, scheme, templater):
49 def __init__(self, url, scheme, templater):
50 self.scheme = scheme
50 self.scheme = scheme
51 self.templater = templater
51 self.templater = templater
52 self.url = url
52 self.url = url
53 try:
53 try:
54 self.parts = max(map(int, re.findall(r'\{(\d+)\}', self.url)))
54 self.parts = max(map(int, re.findall(r'\{(\d+)\}', self.url)))
55 except ValueError:
55 except ValueError:
56 self.parts = 0
56 self.parts = 0
57
57
58 def __repr__(self):
58 def __repr__(self):
59 return '<ShortRepository: %s>' % self.scheme
59 return '<ShortRepository: %s>' % self.scheme
60
60
61 def instance(self, ui, url, create):
61 def instance(self, ui, url, create):
62 # Should this use urlmod.url(), or is manual parsing better?
62 # Should this use urlmod.url(), or is manual parsing better?
63 url = url.split('://', 1)[1]
63 url = url.split('://', 1)[1]
64 parts = url.split('/', self.parts)
64 parts = url.split('/', self.parts)
65 if len(parts) > self.parts:
65 if len(parts) > self.parts:
66 tail = parts[-1]
66 tail = parts[-1]
67 parts = parts[:-1]
67 parts = parts[:-1]
68 else:
68 else:
69 tail = ''
69 tail = ''
70 context = dict((str(i + 1), v) for i, v in enumerate(parts))
70 context = dict((str(i + 1), v) for i, v in enumerate(parts))
71 url = ''.join(self.templater.process(self.url, context)) + tail
71 url = ''.join(self.templater.process(self.url, context)) + tail
72 return hg._lookup(url).instance(ui, url, create)
72 return hg._lookup(url).instance(ui, url, create)
73
73
74 def hasdriveletter(orig, path):
74 def hasdriveletter(orig, path):
75 for scheme in schemes:
75 for scheme in schemes:
76 if path.startswith(scheme + ':'):
76 if path.startswith(scheme + ':'):
77 return False
77 return False
78 return orig(path)
78 return orig(path)
79
79
80 schemes = {
80 schemes = {
81 'py': 'http://hg.python.org/',
81 'py': 'http://hg.python.org/',
82 'bb': 'https://bitbucket.org/',
82 'bb': 'https://bitbucket.org/',
83 'bb+ssh': 'ssh://hg@bitbucket.org/',
83 'bb+ssh': 'ssh://hg@bitbucket.org/',
84 'gcode': 'https://{1}.googlecode.com/hg/',
84 'gcode': 'https://{1}.googlecode.com/hg/',
85 'kiln': 'https://{1}.kilnhg.com/Repo/'
85 'kiln': 'https://{1}.kilnhg.com/Repo/'
86 }
86 }
87
87
88 def extsetup(ui):
88 def extsetup(ui):
89 schemes.update(dict(ui.configitems('schemes')))
89 schemes.update(dict(ui.configitems('schemes')))
90 t = templater.engine(lambda x: x)
90 t = templater.engine(lambda x: x)
91 for scheme, url in schemes.items():
91 for scheme, url in schemes.items():
92 if (os.name == 'nt' and len(scheme) == 1 and scheme.isalpha()
92 if (os.name == 'nt' and len(scheme) == 1 and scheme.isalpha()
93 and os.path.exists('%s:\\' % scheme)):
93 and os.path.exists('%s:\\' % scheme)):
94 raise util.Abort(_('custom scheme %s:// conflicts with drive '
94 raise util.Abort(_('custom scheme %s:// conflicts with drive '
95 'letter %s:\\\n') % (scheme, scheme.upper()))
95 'letter %s:\\\n') % (scheme, scheme.upper()))
96 hg.schemes[scheme] = ShortRepository(url, scheme, t)
96 hg.schemes[scheme] = ShortRepository(url, scheme, t)
97
97
98 extensions.wrapfunction(urlmod, 'hasdriveletter', hasdriveletter)
98 extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter)
@@ -1,322 +1,322 @@
1 # bundlerepo.py - repository class for viewing uncompressed bundles
1 # bundlerepo.py - repository class for viewing uncompressed bundles
2 #
2 #
3 # Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Repository class for viewing uncompressed bundles.
8 """Repository class for viewing uncompressed bundles.
9
9
10 This provides a read-only repository interface to bundles as if they
10 This provides a read-only repository interface to bundles as if they
11 were part of the actual repository.
11 were part of the actual repository.
12 """
12 """
13
13
14 from node import nullid
14 from node import nullid
15 from i18n import _
15 from i18n import _
16 import os, struct, tempfile, shutil
16 import os, struct, tempfile, shutil
17 import changegroup, util, mdiff, discovery
17 import changegroup, util, mdiff, discovery
18 import localrepo, changelog, manifest, filelog, revlog, error, url
18 import localrepo, changelog, manifest, filelog, revlog, error
19
19
20 class bundlerevlog(revlog.revlog):
20 class bundlerevlog(revlog.revlog):
21 def __init__(self, opener, indexfile, bundle,
21 def __init__(self, opener, indexfile, bundle,
22 linkmapper=None):
22 linkmapper=None):
23 # How it works:
23 # How it works:
24 # to retrieve a revision, we need to know the offset of
24 # to retrieve a revision, we need to know the offset of
25 # the revision in the bundle (an unbundle object).
25 # the revision in the bundle (an unbundle object).
26 #
26 #
27 # We store this offset in the index (start). To differentiate a
27 # We store this offset in the index (start). To differentiate a
28 # rev in the bundle from a rev in the revlog, we check
28 # rev in the bundle from a rev in the revlog, we check
29 # len(index[r]). If the tuple is bigger than 7, it is a bundle
29 # len(index[r]). If the tuple is bigger than 7, it is a bundle
30 # entry (it is bigger since we also store the delta's base node)
30 # entry (it is bigger since we also store the delta's base node)
31 #
31 #
32 revlog.revlog.__init__(self, opener, indexfile)
32 revlog.revlog.__init__(self, opener, indexfile)
33 self.bundle = bundle
33 self.bundle = bundle
34 self.basemap = {}
34 self.basemap = {}
35 def chunkpositer():
35 def chunkpositer():
36 while 1:
36 while 1:
37 chunk = bundle.chunk()
37 chunk = bundle.chunk()
38 if not chunk:
38 if not chunk:
39 break
39 break
40 pos = bundle.tell()
40 pos = bundle.tell()
41 yield chunk, pos - len(chunk)
41 yield chunk, pos - len(chunk)
42 n = len(self)
42 n = len(self)
43 prev = None
43 prev = None
44 for chunk, start in chunkpositer():
44 for chunk, start in chunkpositer():
45 size = len(chunk)
45 size = len(chunk)
46 if size < 80:
46 if size < 80:
47 raise util.Abort(_("invalid changegroup"))
47 raise util.Abort(_("invalid changegroup"))
48 start += 80
48 start += 80
49 size -= 80
49 size -= 80
50 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
50 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
51 if node in self.nodemap:
51 if node in self.nodemap:
52 prev = node
52 prev = node
53 continue
53 continue
54 for p in (p1, p2):
54 for p in (p1, p2):
55 if not p in self.nodemap:
55 if not p in self.nodemap:
56 raise error.LookupError(p, self.indexfile,
56 raise error.LookupError(p, self.indexfile,
57 _("unknown parent"))
57 _("unknown parent"))
58 if linkmapper is None:
58 if linkmapper is None:
59 link = n
59 link = n
60 else:
60 else:
61 link = linkmapper(cs)
61 link = linkmapper(cs)
62
62
63 if not prev:
63 if not prev:
64 prev = p1
64 prev = p1
65 # start, size, full unc. size, base (unused), link, p1, p2, node
65 # start, size, full unc. size, base (unused), link, p1, p2, node
66 e = (revlog.offset_type(start, 0), size, -1, -1, link,
66 e = (revlog.offset_type(start, 0), size, -1, -1, link,
67 self.rev(p1), self.rev(p2), node)
67 self.rev(p1), self.rev(p2), node)
68 self.basemap[n] = prev
68 self.basemap[n] = prev
69 self.index.insert(-1, e)
69 self.index.insert(-1, e)
70 self.nodemap[node] = n
70 self.nodemap[node] = n
71 prev = node
71 prev = node
72 n += 1
72 n += 1
73
73
74 def inbundle(self, rev):
74 def inbundle(self, rev):
75 """is rev from the bundle"""
75 """is rev from the bundle"""
76 if rev < 0:
76 if rev < 0:
77 return False
77 return False
78 return rev in self.basemap
78 return rev in self.basemap
79 def bundlebase(self, rev):
79 def bundlebase(self, rev):
80 return self.basemap[rev]
80 return self.basemap[rev]
81 def _chunk(self, rev):
81 def _chunk(self, rev):
82 # Warning: in case of bundle, the diff is against bundlebase,
82 # Warning: in case of bundle, the diff is against bundlebase,
83 # not against rev - 1
83 # not against rev - 1
84 # XXX: could use some caching
84 # XXX: could use some caching
85 if not self.inbundle(rev):
85 if not self.inbundle(rev):
86 return revlog.revlog._chunk(self, rev)
86 return revlog.revlog._chunk(self, rev)
87 self.bundle.seek(self.start(rev))
87 self.bundle.seek(self.start(rev))
88 return self.bundle.read(self.length(rev))
88 return self.bundle.read(self.length(rev))
89
89
90 def revdiff(self, rev1, rev2):
90 def revdiff(self, rev1, rev2):
91 """return or calculate a delta between two revisions"""
91 """return or calculate a delta between two revisions"""
92 if self.inbundle(rev1) and self.inbundle(rev2):
92 if self.inbundle(rev1) and self.inbundle(rev2):
93 # hot path for bundle
93 # hot path for bundle
94 revb = self.rev(self.bundlebase(rev2))
94 revb = self.rev(self.bundlebase(rev2))
95 if revb == rev1:
95 if revb == rev1:
96 return self._chunk(rev2)
96 return self._chunk(rev2)
97 elif not self.inbundle(rev1) and not self.inbundle(rev2):
97 elif not self.inbundle(rev1) and not self.inbundle(rev2):
98 return revlog.revlog.revdiff(self, rev1, rev2)
98 return revlog.revlog.revdiff(self, rev1, rev2)
99
99
100 return mdiff.textdiff(self.revision(self.node(rev1)),
100 return mdiff.textdiff(self.revision(self.node(rev1)),
101 self.revision(self.node(rev2)))
101 self.revision(self.node(rev2)))
102
102
103 def revision(self, node):
103 def revision(self, node):
104 """return an uncompressed revision of a given"""
104 """return an uncompressed revision of a given"""
105 if node == nullid:
105 if node == nullid:
106 return ""
106 return ""
107
107
108 text = None
108 text = None
109 chain = []
109 chain = []
110 iter_node = node
110 iter_node = node
111 rev = self.rev(iter_node)
111 rev = self.rev(iter_node)
112 # reconstruct the revision if it is from a changegroup
112 # reconstruct the revision if it is from a changegroup
113 while self.inbundle(rev):
113 while self.inbundle(rev):
114 if self._cache and self._cache[0] == iter_node:
114 if self._cache and self._cache[0] == iter_node:
115 text = self._cache[2]
115 text = self._cache[2]
116 break
116 break
117 chain.append(rev)
117 chain.append(rev)
118 iter_node = self.bundlebase(rev)
118 iter_node = self.bundlebase(rev)
119 rev = self.rev(iter_node)
119 rev = self.rev(iter_node)
120 if text is None:
120 if text is None:
121 text = revlog.revlog.revision(self, iter_node)
121 text = revlog.revlog.revision(self, iter_node)
122
122
123 while chain:
123 while chain:
124 delta = self._chunk(chain.pop())
124 delta = self._chunk(chain.pop())
125 text = mdiff.patches(text, [delta])
125 text = mdiff.patches(text, [delta])
126
126
127 p1, p2 = self.parents(node)
127 p1, p2 = self.parents(node)
128 if node != revlog.hash(text, p1, p2):
128 if node != revlog.hash(text, p1, p2):
129 raise error.RevlogError(_("integrity check failed on %s:%d")
129 raise error.RevlogError(_("integrity check failed on %s:%d")
130 % (self.datafile, self.rev(node)))
130 % (self.datafile, self.rev(node)))
131
131
132 self._cache = (node, self.rev(node), text)
132 self._cache = (node, self.rev(node), text)
133 return text
133 return text
134
134
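The revision() method just above is the heart of this class: a revision that exists only in the bundle is rebuilt by following basemap back to a revision whose full text is already available, then replaying the stored deltas in order. A toy model of that walk-then-patch loop follows, with plain appended strings standing in for mdiff binary deltas; fulltexts, bases and deltas are made-up data, not Mercurial structures.

    # Toy model of the chain reconstruction in bundlerevlog.revision():
    # walk back to a base whose full text is known, then replay the deltas
    # from oldest to newest. Real deltas are binary mdiff patches; here a
    # "delta" is simply text appended at the end, to keep the flow visible.
    fulltexts = {'base': 'line one\n'}            # already stored in the revlog
    bases = {'rev1': 'base', 'rev2': 'rev1'}      # bundle rev -> its delta base
    deltas = {'rev1': 'line two\n', 'rev2': 'line three\n'}

    def reconstruct(node):
        chain = []
        while node not in fulltexts:              # mirrors "while self.inbundle(rev)"
            chain.append(node)
            node = bases[node]                    # mirrors self.bundlebase(rev)
        text = fulltexts[node]
        while chain:                              # stand-in for mdiff.patches()
            text += deltas[chain.pop()]
        return text

    assert reconstruct('rev2') == 'line one\nline two\nline three\n'

The _cache check in the real method short-circuits this walk when a previously reconstructed revision turns up as one of the bases, which helps because changegroups usually arrive as long delta chains.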
135 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
135 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
136 raise NotImplementedError
136 raise NotImplementedError
137 def addgroup(self, revs, linkmapper, transaction):
137 def addgroup(self, revs, linkmapper, transaction):
138 raise NotImplementedError
138 raise NotImplementedError
139 def strip(self, rev, minlink):
139 def strip(self, rev, minlink):
140 raise NotImplementedError
140 raise NotImplementedError
141 def checksize(self):
141 def checksize(self):
142 raise NotImplementedError
142 raise NotImplementedError
143
143
144 class bundlechangelog(bundlerevlog, changelog.changelog):
144 class bundlechangelog(bundlerevlog, changelog.changelog):
145 def __init__(self, opener, bundle):
145 def __init__(self, opener, bundle):
146 changelog.changelog.__init__(self, opener)
146 changelog.changelog.__init__(self, opener)
147 bundlerevlog.__init__(self, opener, self.indexfile, bundle)
147 bundlerevlog.__init__(self, opener, self.indexfile, bundle)
148
148
149 class bundlemanifest(bundlerevlog, manifest.manifest):
149 class bundlemanifest(bundlerevlog, manifest.manifest):
150 def __init__(self, opener, bundle, linkmapper):
150 def __init__(self, opener, bundle, linkmapper):
151 manifest.manifest.__init__(self, opener)
151 manifest.manifest.__init__(self, opener)
152 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
152 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
153 linkmapper)
153 linkmapper)
154
154
155 class bundlefilelog(bundlerevlog, filelog.filelog):
155 class bundlefilelog(bundlerevlog, filelog.filelog):
156 def __init__(self, opener, path, bundle, linkmapper):
156 def __init__(self, opener, path, bundle, linkmapper):
157 filelog.filelog.__init__(self, opener, path)
157 filelog.filelog.__init__(self, opener, path)
158 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
158 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
159 linkmapper)
159 linkmapper)
160
160
161 class bundlerepository(localrepo.localrepository):
161 class bundlerepository(localrepo.localrepository):
162 def __init__(self, ui, path, bundlename):
162 def __init__(self, ui, path, bundlename):
163 self._tempparent = None
163 self._tempparent = None
164 try:
164 try:
165 localrepo.localrepository.__init__(self, ui, path)
165 localrepo.localrepository.__init__(self, ui, path)
166 except error.RepoError:
166 except error.RepoError:
167 self._tempparent = tempfile.mkdtemp()
167 self._tempparent = tempfile.mkdtemp()
168 localrepo.instance(ui, self._tempparent, 1)
168 localrepo.instance(ui, self._tempparent, 1)
169 localrepo.localrepository.__init__(self, ui, self._tempparent)
169 localrepo.localrepository.__init__(self, ui, self._tempparent)
170
170
171 if path:
171 if path:
172 self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
172 self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
173 else:
173 else:
174 self._url = 'bundle:' + bundlename
174 self._url = 'bundle:' + bundlename
175
175
176 self.tempfile = None
176 self.tempfile = None
177 f = util.posixfile(bundlename, "rb")
177 f = util.posixfile(bundlename, "rb")
178 self.bundle = changegroup.readbundle(f, bundlename)
178 self.bundle = changegroup.readbundle(f, bundlename)
179 if self.bundle.compressed():
179 if self.bundle.compressed():
180 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
180 fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
181 suffix=".hg10un", dir=self.path)
181 suffix=".hg10un", dir=self.path)
182 self.tempfile = temp
182 self.tempfile = temp
183 fptemp = os.fdopen(fdtemp, 'wb')
183 fptemp = os.fdopen(fdtemp, 'wb')
184
184
185 try:
185 try:
186 fptemp.write("HG10UN")
186 fptemp.write("HG10UN")
187 while 1:
187 while 1:
188 chunk = self.bundle.read(2**18)
188 chunk = self.bundle.read(2**18)
189 if not chunk:
189 if not chunk:
190 break
190 break
191 fptemp.write(chunk)
191 fptemp.write(chunk)
192 finally:
192 finally:
193 fptemp.close()
193 fptemp.close()
194
194
195 f = util.posixfile(self.tempfile, "rb")
195 f = util.posixfile(self.tempfile, "rb")
196 self.bundle = changegroup.readbundle(f, bundlename)
196 self.bundle = changegroup.readbundle(f, bundlename)
197
197
198 # dict with the mapping 'filename' -> position in the bundle
198 # dict with the mapping 'filename' -> position in the bundle
199 self.bundlefilespos = {}
199 self.bundlefilespos = {}
200
200
201 @util.propertycache
201 @util.propertycache
202 def changelog(self):
202 def changelog(self):
203 c = bundlechangelog(self.sopener, self.bundle)
203 c = bundlechangelog(self.sopener, self.bundle)
204 self.manstart = self.bundle.tell()
204 self.manstart = self.bundle.tell()
205 return c
205 return c
206
206
207 @util.propertycache
207 @util.propertycache
208 def manifest(self):
208 def manifest(self):
209 self.bundle.seek(self.manstart)
209 self.bundle.seek(self.manstart)
210 m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev)
210 m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev)
211 self.filestart = self.bundle.tell()
211 self.filestart = self.bundle.tell()
212 return m
212 return m
213
213
214 @util.propertycache
214 @util.propertycache
215 def manstart(self):
215 def manstart(self):
216 self.changelog
216 self.changelog
217 return self.manstart
217 return self.manstart
218
218
219 @util.propertycache
219 @util.propertycache
220 def filestart(self):
220 def filestart(self):
221 self.manifest
221 self.manifest
222 return self.filestart
222 return self.filestart
223
223
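A note on the four properties above: changelog, manifest, manstart and filestart are all @util.propertycache, so nothing is read from the bundle until it is first needed, and reading the changelog is what records where the manifest chunks start (likewise, reading the manifest records filestart). Below is a bare-bones sketch of the descriptor pattern behind propertycache; it illustrates the idea and is not util's exact implementation.

    # Caching-descriptor sketch: the first access computes the value and
    # stores it on the instance, shadowing the descriptor; later accesses
    # are ordinary attribute lookups with no recomputation.
    class propertycache(object):
        def __init__(self, func):
            self.func = func
            self.name = func.__name__
        def __get__(self, obj, objtype=None):
            value = self.func(obj)
            setattr(obj, self.name, value)
            return value

    class demo(object):
        @propertycache
        def expensive(self):
            print('computing...')
            return 42

    d = demo()
    print(d.expensive)   # prints "computing..." then 42
    print(d.expensive)   # prints 42 only; the cached value is reused

This is also why manstart and filestart simply touch self.changelog or self.manifest and then return the attribute: triggering the other property is what stores the value on the instance.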
224 def url(self):
224 def url(self):
225 return self._url
225 return self._url
226
226
227 def file(self, f):
227 def file(self, f):
228 if not self.bundlefilespos:
228 if not self.bundlefilespos:
229 self.bundle.seek(self.filestart)
229 self.bundle.seek(self.filestart)
230 while 1:
230 while 1:
231 chunk = self.bundle.chunk()
231 chunk = self.bundle.chunk()
232 if not chunk:
232 if not chunk:
233 break
233 break
234 self.bundlefilespos[chunk] = self.bundle.tell()
234 self.bundlefilespos[chunk] = self.bundle.tell()
235 while 1:
235 while 1:
236 c = self.bundle.chunk()
236 c = self.bundle.chunk()
237 if not c:
237 if not c:
238 break
238 break
239
239
240 if f[0] == '/':
240 if f[0] == '/':
241 f = f[1:]
241 f = f[1:]
242 if f in self.bundlefilespos:
242 if f in self.bundlefilespos:
243 self.bundle.seek(self.bundlefilespos[f])
243 self.bundle.seek(self.bundlefilespos[f])
244 return bundlefilelog(self.sopener, f, self.bundle,
244 return bundlefilelog(self.sopener, f, self.bundle,
245 self.changelog.rev)
245 self.changelog.rev)
246 else:
246 else:
247 return filelog.filelog(self.sopener, f)
247 return filelog.filelog(self.sopener, f)
248
248
249 def close(self):
249 def close(self):
250 """Close assigned bundle file immediately."""
250 """Close assigned bundle file immediately."""
251 self.bundle.close()
251 self.bundle.close()
252 if self.tempfile is not None:
252 if self.tempfile is not None:
253 os.unlink(self.tempfile)
253 os.unlink(self.tempfile)
254 if self._tempparent:
254 if self._tempparent:
255 shutil.rmtree(self._tempparent, True)
255 shutil.rmtree(self._tempparent, True)
256
256
257 def cancopy(self):
257 def cancopy(self):
258 return False
258 return False
259
259
260 def getcwd(self):
260 def getcwd(self):
261 return os.getcwd() # always outside the repo
261 return os.getcwd() # always outside the repo
262
262
263 def instance(ui, path, create):
263 def instance(ui, path, create):
264 if create:
264 if create:
265 raise util.Abort(_('cannot create new bundle repository'))
265 raise util.Abort(_('cannot create new bundle repository'))
266 parentpath = ui.config("bundle", "mainreporoot", "")
266 parentpath = ui.config("bundle", "mainreporoot", "")
267 if parentpath:
267 if parentpath:
268 # Try to make the full path relative so we get a nice, short URL.
268 # Try to make the full path relative so we get a nice, short URL.
269 # In particular, we don't want temp dir names in test outputs.
269 # In particular, we don't want temp dir names in test outputs.
270 cwd = os.getcwd()
270 cwd = os.getcwd()
271 if parentpath == cwd:
271 if parentpath == cwd:
272 parentpath = ''
272 parentpath = ''
273 else:
273 else:
274 cwd = os.path.join(cwd,'')
274 cwd = os.path.join(cwd,'')
275 if parentpath.startswith(cwd):
275 if parentpath.startswith(cwd):
276 parentpath = parentpath[len(cwd):]
276 parentpath = parentpath[len(cwd):]
277 u = url.url(path)
277 u = util.url(path)
278 path = u.localpath()
278 path = u.localpath()
279 if u.scheme == 'bundle':
279 if u.scheme == 'bundle':
280 s = path.split("+", 1)
280 s = path.split("+", 1)
281 if len(s) == 1:
281 if len(s) == 1:
282 repopath, bundlename = parentpath, s[0]
282 repopath, bundlename = parentpath, s[0]
283 else:
283 else:
284 repopath, bundlename = s
284 repopath, bundlename = s
285 else:
285 else:
286 repopath, bundlename = parentpath, path
286 repopath, bundlename = parentpath, path
287 return bundlerepository(ui, repopath, bundlename)
287 return bundlerepository(ui, repopath, bundlename)
288
288
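instance() above accepts either a bare bundle filename or a bundle: URL of the form bundle:<repopath>+<bundlefile>; when no repository path is embedded, it falls back to the bundle.mainreporoot config set by the commands that create the bundle. A small sketch of just that splitting rule follows; parsebundleurl is an invented helper name, and the real code goes through util.url and localpath().

    # How a bundle path/URL is split into (repopath, bundlename), following
    # the branches in instance() above. parsebundleurl exists only for this
    # illustration.
    def parsebundleurl(path, parentpath=''):
        if path.startswith('bundle:'):
            rest = path[len('bundle:'):]
            first, sep, second = rest.partition('+')
            if not sep:                       # bundle:changes.hg (no repo path)
                return parentpath, first
            return first, second              # bundle:/srv/repo+changes.hg
        return parentpath, path               # plain bundle filename

    print(parsebundleurl('../changes.hg', '/srv/repo'))       # ('/srv/repo', '../changes.hg')
    print(parsebundleurl('bundle:/srv/repo+changes.hg'))      # ('/srv/repo', 'changes.hg')
    print(parsebundleurl('bundle:changes.hg', '/srv/repo'))   # ('/srv/repo', 'changes.hg')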
289 def getremotechanges(ui, repo, other, revs=None, bundlename=None,
289 def getremotechanges(ui, repo, other, revs=None, bundlename=None,
290 force=False):
290 force=False):
291 tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force)
291 tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force)
292 common, incoming, rheads = tmp
292 common, incoming, rheads = tmp
293 if not incoming:
293 if not incoming:
294 try:
294 try:
295 os.unlink(bundlename)
295 os.unlink(bundlename)
296 except OSError:
296 except OSError:
297 pass
297 pass
298 return other, None, None, None
298 return other, None, None, None
299
299
300 bundle = None
300 bundle = None
301 if bundlename or not other.local():
301 if bundlename or not other.local():
302 # create a bundle (uncompressed if other repo is not local)
302 # create a bundle (uncompressed if other repo is not local)
303
303
304 if revs is None and other.capable('changegroupsubset'):
304 if revs is None and other.capable('changegroupsubset'):
305 revs = rheads
305 revs = rheads
306
306
307 if other.capable('getbundle'):
307 if other.capable('getbundle'):
308 cg = other.getbundle('incoming', common=common, heads=revs)
308 cg = other.getbundle('incoming', common=common, heads=revs)
309 elif revs is None:
309 elif revs is None:
310 cg = other.changegroup(incoming, "incoming")
310 cg = other.changegroup(incoming, "incoming")
311 else:
311 else:
312 cg = other.changegroupsubset(incoming, revs, 'incoming')
312 cg = other.changegroupsubset(incoming, revs, 'incoming')
313 bundletype = other.local() and "HG10BZ" or "HG10UN"
313 bundletype = other.local() and "HG10BZ" or "HG10UN"
314 fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
314 fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
315 # keep written bundle?
315 # keep written bundle?
316 if bundlename:
316 if bundlename:
317 bundle = None
317 bundle = None
318 if not other.local():
318 if not other.local():
319 # use the created uncompressed bundlerepo
319 # use the created uncompressed bundlerepo
320 other = bundlerepository(ui, repo.root, fname)
320 other = bundlerepository(ui, repo.root, fname)
321 return (other, common, incoming, bundle)
321 return (other, common, incoming, bundle)
322
322
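Taken together, this module is what lets a bundle be browsed without being applied. A rough usage sketch follows, assuming a Mercurial of roughly this vintage on the Python path and an existing repository plus bundle file at the illustrative paths below; exact ui construction details vary between versions.

    # Rough sketch: overlay a bundle on an existing repository and walk the
    # combined history (local revisions plus those present only in the
    # bundle). Paths are examples only.
    from mercurial import ui as uimod
    from mercurial import bundlerepo

    u = uimod.ui()
    repo = bundlerepo.instance(u, 'bundle:/srv/repo+/tmp/changes.hg', False)
    try:
        for rev in repo:
            ctx = repo[rev]
            print('%d:%s %s' % (rev, ctx, ctx.description().split('\n')[0]))
    finally:
        repo.close()

The same machinery backs hg incoming --bundle and the temporary repositories returned by getremotechanges() in this file.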
@@ -1,4872 +1,4872 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, difflib, time, tempfile
11 import os, re, sys, difflib, time, tempfile
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 import patch, help, url, encoding, templatekw, discovery
13 import patch, help, url, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 import merge as mergemod
15 import merge as mergemod
16 import minirst, revset, templatefilters
16 import minirst, revset, templatefilters
17 import dagparser
17 import dagparser
18
18
19 # Commands start here, listed alphabetically
19 # Commands start here, listed alphabetically
20
20
21 def add(ui, repo, *pats, **opts):
21 def add(ui, repo, *pats, **opts):
22 """add the specified files on the next commit
22 """add the specified files on the next commit
23
23
24 Schedule files to be version controlled and added to the
24 Schedule files to be version controlled and added to the
25 repository.
25 repository.
26
26
27 The files will be added to the repository at the next commit. To
27 The files will be added to the repository at the next commit. To
28 undo an add before that, see :hg:`forget`.
28 undo an add before that, see :hg:`forget`.
29
29
30 If no names are given, add all files to the repository.
30 If no names are given, add all files to the repository.
31
31
32 .. container:: verbose
32 .. container:: verbose
33
33
34 An example showing how new (unknown) files are added
34 An example showing how new (unknown) files are added
35 automatically by :hg:`add`::
35 automatically by :hg:`add`::
36
36
37 $ ls
37 $ ls
38 foo.c
38 foo.c
39 $ hg status
39 $ hg status
40 ? foo.c
40 ? foo.c
41 $ hg add
41 $ hg add
42 adding foo.c
42 adding foo.c
43 $ hg status
43 $ hg status
44 A foo.c
44 A foo.c
45
45
46 Returns 0 if all files are successfully added.
46 Returns 0 if all files are successfully added.
47 """
47 """
48
48
49 m = cmdutil.match(repo, pats, opts)
49 m = cmdutil.match(repo, pats, opts)
50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
51 opts.get('subrepos'), prefix="")
51 opts.get('subrepos'), prefix="")
52 return rejected and 1 or 0
52 return rejected and 1 or 0
53
53
54 def addremove(ui, repo, *pats, **opts):
54 def addremove(ui, repo, *pats, **opts):
55 """add all new files, delete all missing files
55 """add all new files, delete all missing files
56
56
57 Add all new files and remove all missing files from the
57 Add all new files and remove all missing files from the
58 repository.
58 repository.
59
59
60 New files are ignored if they match any of the patterns in
60 New files are ignored if they match any of the patterns in
61 ``.hgignore``. As with add, these changes take effect at the next
61 ``.hgignore``. As with add, these changes take effect at the next
62 commit.
62 commit.
63
63
64 Use the -s/--similarity option to detect renamed files. With a
64 Use the -s/--similarity option to detect renamed files. With a
65 parameter greater than 0, this compares every removed file with
65 parameter greater than 0, this compares every removed file with
66 every added file and records those similar enough as renames. This
66 every added file and records those similar enough as renames. This
67 option takes a percentage between 0 (disabled) and 100 (files must
67 option takes a percentage between 0 (disabled) and 100 (files must
68 be identical) as its parameter. Detecting renamed files this way
68 be identical) as its parameter. Detecting renamed files this way
69 can be expensive. After using this option, :hg:`status -C` can be
69 can be expensive. After using this option, :hg:`status -C` can be
70 used to check which files were identified as moved or renamed.
70 used to check which files were identified as moved or renamed.
71
71
72 Returns 0 if all files are successfully added.
72 Returns 0 if all files are successfully added.
73 """
73 """
74 try:
74 try:
75 sim = float(opts.get('similarity') or 100)
75 sim = float(opts.get('similarity') or 100)
76 except ValueError:
76 except ValueError:
77 raise util.Abort(_('similarity must be a number'))
77 raise util.Abort(_('similarity must be a number'))
78 if sim < 0 or sim > 100:
78 if sim < 0 or sim > 100:
79 raise util.Abort(_('similarity must be between 0 and 100'))
79 raise util.Abort(_('similarity must be between 0 and 100'))
80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
81
81
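The --similarity score described in the docstring above is a percentage match between each removed file and each added file; pairs at or above the threshold are recorded as renames. To picture the metric, here is a tiny approximation using difflib; Mercurial's own scoring lives elsewhere and is not identical, so treat the numbers as indicative only.

    # Illustration of a 0-100 "similarity" score between a removed and an
    # added file. difflib.SequenceMatcher is only an approximation of
    # Mercurial's scoring; it is used here to make `addremove -s 90` concrete.
    import difflib

    def similarity(old_text, new_text):
        matcher = difflib.SequenceMatcher(None, old_text, new_text)
        return int(matcher.ratio() * 100)

    old = 'def f(x):\n    return x + 1\n'
    new = 'def f(x):\n    return x + 2\n'
    score = similarity(old, new)
    print(score)          # a high score: probably the same file, lightly edited
    print(score >= 90)    # the comparison `hg addremove -s 90` effectively makes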
82 def annotate(ui, repo, *pats, **opts):
82 def annotate(ui, repo, *pats, **opts):
83 """show changeset information by line for each file
83 """show changeset information by line for each file
84
84
85 List changes in files, showing the revision id responsible for
85 List changes in files, showing the revision id responsible for
86 each line
86 each line.
86 each line.
87
88 This command is useful for discovering when a change was made and
88 This command is useful for discovering when a change was made and
89 by whom.
89 by whom.
90
90
91 Without the -a/--text option, annotate will avoid processing files
91 Without the -a/--text option, annotate will avoid processing files
92 it detects as binary. With -a, annotate will annotate the file
92 it detects as binary. With -a, annotate will annotate the file
93 anyway, although the results will probably be neither useful
93 anyway, although the results will probably be neither useful
94 nor desirable.
94 nor desirable.
95
95
96 Returns 0 on success.
96 Returns 0 on success.
97 """
97 """
98 if opts.get('follow'):
98 if opts.get('follow'):
99 # --follow is deprecated and now just an alias for -f/--file
99 # --follow is deprecated and now just an alias for -f/--file
100 # to mimic the behavior of Mercurial before version 1.5
100 # to mimic the behavior of Mercurial before version 1.5
101 opts['file'] = 1
101 opts['file'] = 1
102
102
103 datefunc = ui.quiet and util.shortdate or util.datestr
103 datefunc = ui.quiet and util.shortdate or util.datestr
104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
105
105
106 if not pats:
106 if not pats:
107 raise util.Abort(_('at least one filename or pattern is required'))
107 raise util.Abort(_('at least one filename or pattern is required'))
108
108
109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
110 ('number', lambda x: str(x[0].rev())),
110 ('number', lambda x: str(x[0].rev())),
111 ('changeset', lambda x: short(x[0].node())),
111 ('changeset', lambda x: short(x[0].node())),
112 ('date', getdate),
112 ('date', getdate),
113 ('file', lambda x: x[0].path()),
113 ('file', lambda x: x[0].path()),
114 ]
114 ]
115
115
116 if (not opts.get('user') and not opts.get('changeset')
116 if (not opts.get('user') and not opts.get('changeset')
117 and not opts.get('date') and not opts.get('file')):
117 and not opts.get('date') and not opts.get('file')):
118 opts['number'] = 1
118 opts['number'] = 1
119
119
120 linenumber = opts.get('line_number') is not None
120 linenumber = opts.get('line_number') is not None
121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
122 raise util.Abort(_('at least one of -n/-c is required for -l'))
122 raise util.Abort(_('at least one of -n/-c is required for -l'))
123
123
124 funcmap = [func for op, func in opmap if opts.get(op)]
124 funcmap = [func for op, func in opmap if opts.get(op)]
125 if linenumber:
125 if linenumber:
126 lastfunc = funcmap[-1]
126 lastfunc = funcmap[-1]
127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
128
128
129 def bad(x, y):
129 def bad(x, y):
130 raise util.Abort("%s: %s" % (x, y))
130 raise util.Abort("%s: %s" % (x, y))
131
131
132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
133 m = cmdutil.match(repo, pats, opts)
133 m = cmdutil.match(repo, pats, opts)
134 m.bad = bad
134 m.bad = bad
135 follow = not opts.get('no_follow')
135 follow = not opts.get('no_follow')
136 for abs in ctx.walk(m):
136 for abs in ctx.walk(m):
137 fctx = ctx[abs]
137 fctx = ctx[abs]
138 if not opts.get('text') and util.binary(fctx.data()):
138 if not opts.get('text') and util.binary(fctx.data()):
139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
140 continue
140 continue
141
141
142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
143 pieces = []
143 pieces = []
144
144
145 for f in funcmap:
145 for f in funcmap:
146 l = [f(n) for n, dummy in lines]
146 l = [f(n) for n, dummy in lines]
147 if l:
147 if l:
148 sized = [(x, encoding.colwidth(x)) for x in l]
148 sized = [(x, encoding.colwidth(x)) for x in l]
149 ml = max([w for x, w in sized])
149 ml = max([w for x, w in sized])
150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
151
151
152 if pieces:
152 if pieces:
153 for p, l in zip(zip(*pieces), lines):
153 for p, l in zip(zip(*pieces), lines):
154 ui.write("%s: %s" % (" ".join(p), l[1]))
154 ui.write("%s: %s" % (" ".join(p), l[1]))
155
155
156 def archive(ui, repo, dest, **opts):
156 def archive(ui, repo, dest, **opts):
157 '''create an unversioned archive of a repository revision
157 '''create an unversioned archive of a repository revision
158
158
159 By default, the revision used is the parent of the working
159 By default, the revision used is the parent of the working
160 directory; use -r/--rev to specify a different revision.
160 directory; use -r/--rev to specify a different revision.
161
161
162 The archive type is automatically detected based on file
162 The archive type is automatically detected based on file
163 extension (or override using -t/--type).
163 extension (or override using -t/--type).
164
164
165 Valid types are:
165 Valid types are:
166
166
167 :``files``: a directory full of files (default)
167 :``files``: a directory full of files (default)
168 :``tar``: tar archive, uncompressed
168 :``tar``: tar archive, uncompressed
169 :``tbz2``: tar archive, compressed using bzip2
169 :``tbz2``: tar archive, compressed using bzip2
170 :``tgz``: tar archive, compressed using gzip
170 :``tgz``: tar archive, compressed using gzip
171 :``uzip``: zip archive, uncompressed
171 :``uzip``: zip archive, uncompressed
172 :``zip``: zip archive, compressed using deflate
172 :``zip``: zip archive, compressed using deflate
173
173
174 The exact name of the destination archive or directory is given
174 The exact name of the destination archive or directory is given
175 using a format string; see :hg:`help export` for details.
175 using a format string; see :hg:`help export` for details.
176
176
177 Each member added to an archive file has a directory prefix
177 Each member added to an archive file has a directory prefix
178 prepended. Use -p/--prefix to specify a format string for the
178 prepended. Use -p/--prefix to specify a format string for the
179 prefix. The default is the basename of the archive, with suffixes
179 prefix. The default is the basename of the archive, with suffixes
180 removed.
180 removed.
181
181
182 Returns 0 on success.
182 Returns 0 on success.
183 '''
183 '''
184
184
185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
186 if not ctx:
186 if not ctx:
187 raise util.Abort(_('no working directory: please specify a revision'))
187 raise util.Abort(_('no working directory: please specify a revision'))
188 node = ctx.node()
188 node = ctx.node()
189 dest = cmdutil.make_filename(repo, dest, node)
189 dest = cmdutil.make_filename(repo, dest, node)
190 if os.path.realpath(dest) == repo.root:
190 if os.path.realpath(dest) == repo.root:
191 raise util.Abort(_('repository root cannot be destination'))
191 raise util.Abort(_('repository root cannot be destination'))
192
192
193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
194 prefix = opts.get('prefix')
194 prefix = opts.get('prefix')
195
195
196 if dest == '-':
196 if dest == '-':
197 if kind == 'files':
197 if kind == 'files':
198 raise util.Abort(_('cannot archive plain files to stdout'))
198 raise util.Abort(_('cannot archive plain files to stdout'))
199 dest = sys.stdout
199 dest = sys.stdout
200 if not prefix:
200 if not prefix:
201 prefix = os.path.basename(repo.root) + '-%h'
201 prefix = os.path.basename(repo.root) + '-%h'
202
202
203 prefix = cmdutil.make_filename(repo, prefix, node)
203 prefix = cmdutil.make_filename(repo, prefix, node)
204 matchfn = cmdutil.match(repo, [], opts)
204 matchfn = cmdutil.match(repo, [], opts)
205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
206 matchfn, prefix, subrepos=opts.get('subrepos'))
206 matchfn, prefix, subrepos=opts.get('subrepos'))
207
207
208 def backout(ui, repo, node=None, rev=None, **opts):
208 def backout(ui, repo, node=None, rev=None, **opts):
209 '''reverse effect of earlier changeset
209 '''reverse effect of earlier changeset
210
210
211 Prepare a new changeset with the effect of REV undone in the
211 Prepare a new changeset with the effect of REV undone in the
212 current working directory.
212 current working directory.
213
213
214 If REV is the parent of the working directory, then this new changeset
214 If REV is the parent of the working directory, then this new changeset
215 is committed automatically. Otherwise, hg needs to merge the
215 is committed automatically. Otherwise, hg needs to merge the
216 changes and the merged result is left uncommitted.
216 changes and the merged result is left uncommitted.
217
217
218 By default, the pending changeset will have one parent,
218 By default, the pending changeset will have one parent,
219 maintaining a linear history. With --merge, the pending changeset
219 maintaining a linear history. With --merge, the pending changeset
220 will instead have two parents: the old parent of the working
220 will instead have two parents: the old parent of the working
221 directory and a new child of REV that simply undoes REV.
221 directory and a new child of REV that simply undoes REV.
222
222
223 Before version 1.7, the behavior without --merge was equivalent to
223 Before version 1.7, the behavior without --merge was equivalent to
224 specifying --merge followed by :hg:`update --clean .` to cancel
224 specifying --merge followed by :hg:`update --clean .` to cancel
225 the merge and leave the child of REV as a head to be merged
225 the merge and leave the child of REV as a head to be merged
226 separately.
226 separately.
227
227
228 See :hg:`help dates` for a list of formats valid for -d/--date.
228 See :hg:`help dates` for a list of formats valid for -d/--date.
229
229
230 Returns 0 on success.
230 Returns 0 on success.
231 '''
231 '''
232 if rev and node:
232 if rev and node:
233 raise util.Abort(_("please specify just one revision"))
233 raise util.Abort(_("please specify just one revision"))
234
234
235 if not rev:
235 if not rev:
236 rev = node
236 rev = node
237
237
238 if not rev:
238 if not rev:
239 raise util.Abort(_("please specify a revision to backout"))
239 raise util.Abort(_("please specify a revision to backout"))
240
240
241 date = opts.get('date')
241 date = opts.get('date')
242 if date:
242 if date:
243 opts['date'] = util.parsedate(date)
243 opts['date'] = util.parsedate(date)
244
244
245 cmdutil.bail_if_changed(repo)
245 cmdutil.bail_if_changed(repo)
246 node = cmdutil.revsingle(repo, rev).node()
246 node = cmdutil.revsingle(repo, rev).node()
247
247
248 op1, op2 = repo.dirstate.parents()
248 op1, op2 = repo.dirstate.parents()
249 a = repo.changelog.ancestor(op1, node)
249 a = repo.changelog.ancestor(op1, node)
250 if a != node:
250 if a != node:
251 raise util.Abort(_('cannot backout change on a different branch'))
251 raise util.Abort(_('cannot backout change on a different branch'))
252
252
253 p1, p2 = repo.changelog.parents(node)
253 p1, p2 = repo.changelog.parents(node)
254 if p1 == nullid:
254 if p1 == nullid:
255 raise util.Abort(_('cannot backout a change with no parents'))
255 raise util.Abort(_('cannot backout a change with no parents'))
256 if p2 != nullid:
256 if p2 != nullid:
257 if not opts.get('parent'):
257 if not opts.get('parent'):
258 raise util.Abort(_('cannot backout a merge changeset without '
258 raise util.Abort(_('cannot backout a merge changeset without '
259 '--parent'))
259 '--parent'))
260 p = repo.lookup(opts['parent'])
260 p = repo.lookup(opts['parent'])
261 if p not in (p1, p2):
261 if p not in (p1, p2):
262 raise util.Abort(_('%s is not a parent of %s') %
262 raise util.Abort(_('%s is not a parent of %s') %
263 (short(p), short(node)))
263 (short(p), short(node)))
264 parent = p
264 parent = p
265 else:
265 else:
266 if opts.get('parent'):
266 if opts.get('parent'):
267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
268 parent = p1
268 parent = p1
269
269
270 # the backout should appear on the same branch
270 # the backout should appear on the same branch
271 branch = repo.dirstate.branch()
271 branch = repo.dirstate.branch()
272 hg.clean(repo, node, show_stats=False)
272 hg.clean(repo, node, show_stats=False)
273 repo.dirstate.setbranch(branch)
273 repo.dirstate.setbranch(branch)
274 revert_opts = opts.copy()
274 revert_opts = opts.copy()
275 revert_opts['date'] = None
275 revert_opts['date'] = None
276 revert_opts['all'] = True
276 revert_opts['all'] = True
277 revert_opts['rev'] = hex(parent)
277 revert_opts['rev'] = hex(parent)
278 revert_opts['no_backup'] = None
278 revert_opts['no_backup'] = None
279 revert(ui, repo, **revert_opts)
279 revert(ui, repo, **revert_opts)
280 if not opts.get('merge') and op1 != node:
280 if not opts.get('merge') and op1 != node:
281 try:
281 try:
282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
283 return hg.update(repo, op1)
283 return hg.update(repo, op1)
284 finally:
284 finally:
285 ui.setconfig('ui', 'forcemerge', '')
285 ui.setconfig('ui', 'forcemerge', '')
286
286
287 commit_opts = opts.copy()
287 commit_opts = opts.copy()
288 commit_opts['addremove'] = False
288 commit_opts['addremove'] = False
289 if not commit_opts['message'] and not commit_opts['logfile']:
289 if not commit_opts['message'] and not commit_opts['logfile']:
290 # we don't translate commit messages
290 # we don't translate commit messages
291 commit_opts['message'] = "Backed out changeset %s" % short(node)
291 commit_opts['message'] = "Backed out changeset %s" % short(node)
292 commit_opts['force_editor'] = True
292 commit_opts['force_editor'] = True
293 commit(ui, repo, **commit_opts)
293 commit(ui, repo, **commit_opts)
294 def nice(node):
294 def nice(node):
295 return '%d:%s' % (repo.changelog.rev(node), short(node))
295 return '%d:%s' % (repo.changelog.rev(node), short(node))
296 ui.status(_('changeset %s backs out changeset %s\n') %
296 ui.status(_('changeset %s backs out changeset %s\n') %
297 (nice(repo.changelog.tip()), nice(node)))
297 (nice(repo.changelog.tip()), nice(node)))
298 if opts.get('merge') and op1 != node:
298 if opts.get('merge') and op1 != node:
299 hg.clean(repo, op1, show_stats=False)
299 hg.clean(repo, op1, show_stats=False)
300 ui.status(_('merging with changeset %s\n')
300 ui.status(_('merging with changeset %s\n')
301 % nice(repo.changelog.tip()))
301 % nice(repo.changelog.tip()))
302 try:
302 try:
303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
304 return hg.merge(repo, hex(repo.changelog.tip()))
304 return hg.merge(repo, hex(repo.changelog.tip()))
305 finally:
305 finally:
306 ui.setconfig('ui', 'forcemerge', '')
306 ui.setconfig('ui', 'forcemerge', '')
307 return 0
307 return 0
308
308
309 def bisect(ui, repo, rev=None, extra=None, command=None,
309 def bisect(ui, repo, rev=None, extra=None, command=None,
310 reset=None, good=None, bad=None, skip=None, extend=None,
310 reset=None, good=None, bad=None, skip=None, extend=None,
311 noupdate=None):
311 noupdate=None):
312 """subdivision search of changesets
312 """subdivision search of changesets
313
313
314 This command helps to find changesets which introduce problems. To
314 This command helps to find changesets which introduce problems. To
315 use, mark the earliest changeset you know exhibits the problem as
315 use, mark the earliest changeset you know exhibits the problem as
316 bad, then mark the latest changeset which is free from the problem
316 bad, then mark the latest changeset which is free from the problem
317 as good. Bisect will update your working directory to a revision
317 as good. Bisect will update your working directory to a revision
318 for testing (unless the -U/--noupdate option is specified). Once
318 for testing (unless the -U/--noupdate option is specified). Once
319 you have performed tests, mark the working directory as good or
319 you have performed tests, mark the working directory as good or
320 bad, and bisect will either update to another candidate changeset
320 bad, and bisect will either update to another candidate changeset
321 or announce that it has found the bad revision.
321 or announce that it has found the bad revision.
322
322
323 As a shortcut, you can also use the revision argument to mark a
323 As a shortcut, you can also use the revision argument to mark a
324 revision as good or bad without checking it out first.
324 revision as good or bad without checking it out first.
325
325
326 If you supply a command, it will be used for automatic bisection.
326 If you supply a command, it will be used for automatic bisection.
327 Its exit status will be used to mark revisions as good or bad:
327 Its exit status will be used to mark revisions as good or bad:
328 status 0 means good, 125 means to skip the revision, 127
328 status 0 means good, 125 means to skip the revision, 127
329 (command not found) will abort the bisection, and any other
329 (command not found) will abort the bisection, and any other
330 non-zero exit status means the revision is bad.
330 non-zero exit status means the revision is bad.
331
331
332 Returns 0 on success.
332 Returns 0 on success.
333 """
333 """
334 def extendbisectrange(nodes, good):
334 def extendbisectrange(nodes, good):
335 # bisect is incomplete when it ends on a merge node and
335 # bisect is incomplete when it ends on a merge node and
336 # one of the parent was not checked.
336 # one of the parents was not checked.
336 # one of the parents was not checked.
337 parents = repo[nodes[0]].parents()
338 if len(parents) > 1:
338 if len(parents) > 1:
339 side = good and state['bad'] or state['good']
339 side = good and state['bad'] or state['good']
340 num = len(set(i.node() for i in parents) & set(side))
340 num = len(set(i.node() for i in parents) & set(side))
341 if num == 1:
341 if num == 1:
342 return parents[0].ancestor(parents[1])
342 return parents[0].ancestor(parents[1])
343 return None
343 return None
344
344
345 def print_result(nodes, good):
345 def print_result(nodes, good):
346 displayer = cmdutil.show_changeset(ui, repo, {})
346 displayer = cmdutil.show_changeset(ui, repo, {})
347 if len(nodes) == 1:
347 if len(nodes) == 1:
348 # narrowed it down to a single revision
348 # narrowed it down to a single revision
349 if good:
349 if good:
350 ui.write(_("The first good revision is:\n"))
350 ui.write(_("The first good revision is:\n"))
351 else:
351 else:
352 ui.write(_("The first bad revision is:\n"))
352 ui.write(_("The first bad revision is:\n"))
353 displayer.show(repo[nodes[0]])
353 displayer.show(repo[nodes[0]])
354 extendnode = extendbisectrange(nodes, good)
354 extendnode = extendbisectrange(nodes, good)
355 if extendnode is not None:
355 if extendnode is not None:
356 ui.write(_('Not all ancestors of this changeset have been'
356 ui.write(_('Not all ancestors of this changeset have been'
357 ' checked.\nUse bisect --extend to continue the '
357 ' checked.\nUse bisect --extend to continue the '
358 'bisection from\nthe common ancestor, %s.\n')
358 'bisection from\nthe common ancestor, %s.\n')
359 % extendnode)
359 % extendnode)
360 else:
360 else:
361 # multiple possible revisions
361 # multiple possible revisions
362 if good:
362 if good:
363 ui.write(_("Due to skipped revisions, the first "
363 ui.write(_("Due to skipped revisions, the first "
364 "good revision could be any of:\n"))
364 "good revision could be any of:\n"))
365 else:
365 else:
366 ui.write(_("Due to skipped revisions, the first "
366 ui.write(_("Due to skipped revisions, the first "
367 "bad revision could be any of:\n"))
367 "bad revision could be any of:\n"))
368 for n in nodes:
368 for n in nodes:
369 displayer.show(repo[n])
369 displayer.show(repo[n])
370 displayer.close()
370 displayer.close()
371
371
372 def check_state(state, interactive=True):
372 def check_state(state, interactive=True):
373 if not state['good'] or not state['bad']:
373 if not state['good'] or not state['bad']:
374 if (good or bad or skip or reset) and interactive:
374 if (good or bad or skip or reset) and interactive:
375 return
375 return
376 if not state['good']:
376 if not state['good']:
377 raise util.Abort(_('cannot bisect (no known good revisions)'))
377 raise util.Abort(_('cannot bisect (no known good revisions)'))
378 else:
378 else:
379 raise util.Abort(_('cannot bisect (no known bad revisions)'))
379 raise util.Abort(_('cannot bisect (no known bad revisions)'))
380 return True
380 return True
381
381
382 # backward compatibility
382 # backward compatibility
383 if rev in "good bad reset init".split():
383 if rev in "good bad reset init".split():
384 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
384 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
385 cmd, rev, extra = rev, extra, None
385 cmd, rev, extra = rev, extra, None
386 if cmd == "good":
386 if cmd == "good":
387 good = True
387 good = True
388 elif cmd == "bad":
388 elif cmd == "bad":
389 bad = True
389 bad = True
390 else:
390 else:
391 reset = True
391 reset = True
392 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
392 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
393 raise util.Abort(_('incompatible arguments'))
393 raise util.Abort(_('incompatible arguments'))
394
394
395 if reset:
395 if reset:
396 p = repo.join("bisect.state")
396 p = repo.join("bisect.state")
397 if os.path.exists(p):
397 if os.path.exists(p):
398 os.unlink(p)
398 os.unlink(p)
399 return
399 return
400
400
401 state = hbisect.load_state(repo)
401 state = hbisect.load_state(repo)
402
402
403 if command:
403 if command:
404 changesets = 1
404 changesets = 1
405 try:
405 try:
406 while changesets:
406 while changesets:
407 # update state
407 # update state
408 status = util.system(command)
408 status = util.system(command)
409 if status == 125:
409 if status == 125:
410 transition = "skip"
410 transition = "skip"
411 elif status == 0:
411 elif status == 0:
412 transition = "good"
412 transition = "good"
413 # status < 0 means process was killed
413 # status < 0 means process was killed
414 elif status == 127:
414 elif status == 127:
415 raise util.Abort(_("failed to execute %s") % command)
415 raise util.Abort(_("failed to execute %s") % command)
416 elif status < 0:
416 elif status < 0:
417 raise util.Abort(_("%s killed") % command)
417 raise util.Abort(_("%s killed") % command)
418 else:
418 else:
419 transition = "bad"
419 transition = "bad"
420 ctx = cmdutil.revsingle(repo, rev)
420 ctx = cmdutil.revsingle(repo, rev)
421 rev = None # clear for future iterations
421 rev = None # clear for future iterations
422 state[transition].append(ctx.node())
422 state[transition].append(ctx.node())
423 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
423 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
424 check_state(state, interactive=False)
424 check_state(state, interactive=False)
425 # bisect
425 # bisect
426 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
426 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
427 # update to next check
427 # update to next check
428 cmdutil.bail_if_changed(repo)
428 cmdutil.bail_if_changed(repo)
429 hg.clean(repo, nodes[0], show_stats=False)
429 hg.clean(repo, nodes[0], show_stats=False)
430 finally:
430 finally:
431 hbisect.save_state(repo, state)
431 hbisect.save_state(repo, state)
432 print_result(nodes, good)
432 print_result(nodes, good)
433 return
433 return
434
434
435 # update state
435 # update state
436
436
437 if rev:
437 if rev:
438 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
438 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
439 else:
439 else:
440 nodes = [repo.lookup('.')]
440 nodes = [repo.lookup('.')]
441
441
442 if good or bad or skip:
442 if good or bad or skip:
443 if good:
443 if good:
444 state['good'] += nodes
444 state['good'] += nodes
445 elif bad:
445 elif bad:
446 state['bad'] += nodes
446 state['bad'] += nodes
447 elif skip:
447 elif skip:
448 state['skip'] += nodes
448 state['skip'] += nodes
449 hbisect.save_state(repo, state)
449 hbisect.save_state(repo, state)
450
450
451 if not check_state(state):
451 if not check_state(state):
452 return
452 return
453
453
454 # actually bisect
454 # actually bisect
455 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
455 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
456 if extend:
456 if extend:
457 if not changesets:
457 if not changesets:
458 extendnode = extendbisectrange(nodes, good)
458 extendnode = extendbisectrange(nodes, good)
459 if extendnode is not None:
459 if extendnode is not None:
460 ui.write(_("Extending search to changeset %d:%s\n"
460 ui.write(_("Extending search to changeset %d:%s\n"
461 % (extendnode.rev(), extendnode)))
461 % (extendnode.rev(), extendnode)))
462 if noupdate:
462 if noupdate:
463 return
463 return
464 cmdutil.bail_if_changed(repo)
464 cmdutil.bail_if_changed(repo)
465 return hg.clean(repo, extendnode.node())
465 return hg.clean(repo, extendnode.node())
466 raise util.Abort(_("nothing to extend"))
466 raise util.Abort(_("nothing to extend"))
467
467
468 if changesets == 0:
468 if changesets == 0:
469 print_result(nodes, good)
469 print_result(nodes, good)
470 else:
470 else:
471 assert len(nodes) == 1 # only a single node can be tested next
471 assert len(nodes) == 1 # only a single node can be tested next
472 node = nodes[0]
472 node = nodes[0]
473 # compute the approximate number of remaining tests
473 # compute the approximate number of remaining tests
474 tests, size = 0, 2
474 tests, size = 0, 2
475 while size <= changesets:
475 while size <= changesets:
476 tests, size = tests + 1, size * 2
476 tests, size = tests + 1, size * 2
477 rev = repo.changelog.rev(node)
477 rev = repo.changelog.rev(node)
478 ui.write(_("Testing changeset %d:%s "
478 ui.write(_("Testing changeset %d:%s "
479 "(%d changesets remaining, ~%d tests)\n")
479 "(%d changesets remaining, ~%d tests)\n")
480 % (rev, short(node), changesets, tests))
480 % (rev, short(node), changesets, tests))
481 if not noupdate:
481 if not noupdate:
482 cmdutil.bail_if_changed(repo)
482 cmdutil.bail_if_changed(repo)
483 return hg.clean(repo, node)
483 return hg.clean(repo, node)
484
484
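One detail of bisect worth calling out: the tests/size loop near the end computes roughly log2 of the number of remaining candidate changesets, which is the "~%d tests" figure printed before each update. The same arithmetic in isolation, with example inputs:

    # The remaining-test estimate from bisect, in isolation: keep doubling
    # `size` until it exceeds the candidate count; the number of doublings
    # approximates how many more revisions will need testing.
    def remaining_tests(changesets):
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        return tests

    for n in (1, 10, 100, 1000):
        print('%d changesets -> ~%d tests' % (n, remaining_tests(n)))
        # 1 -> 0, 10 -> 3, 100 -> 6, 1000 -> 9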
485 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
485 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
486 '''track a line of development with movable markers
486 '''track a line of development with movable markers
487
487
488 Bookmarks are pointers to certain commits that move when
488 Bookmarks are pointers to certain commits that move when
489 committing. Bookmarks are local. They can be renamed, copied and
489 committing. Bookmarks are local. They can be renamed, copied and
490 deleted. It is possible to use bookmark names in :hg:`merge` and
490 deleted. It is possible to use bookmark names in :hg:`merge` and
491 :hg:`update` to merge and update respectively to a given bookmark.
491 :hg:`update` to merge and update respectively to a given bookmark.
492
492
493 You can use :hg:`bookmark NAME` to set a bookmark on the working
493 You can use :hg:`bookmark NAME` to set a bookmark on the working
494 directory's parent revision with the given name. If you specify
494 directory's parent revision with the given name. If you specify
495 a revision using -r REV (where REV may be an existing bookmark),
495 a revision using -r REV (where REV may be an existing bookmark),
496 the bookmark is assigned to that revision.
496 the bookmark is assigned to that revision.
497
497
498 Bookmarks can be pushed and pulled between repositories (see :hg:`help
498 Bookmarks can be pushed and pulled between repositories (see :hg:`help
499 push` and :hg:`help pull`). This requires both the local and remote
499 push` and :hg:`help pull`). This requires both the local and remote
500 repositories to support bookmarks. For versions prior to 1.8, this means
500 repositories to support bookmarks. For versions prior to 1.8, this means
501 the bookmarks extension must be enabled.
501 the bookmarks extension must be enabled.
502 '''
502 '''
503 hexfn = ui.debugflag and hex or short
503 hexfn = ui.debugflag and hex or short
504 marks = repo._bookmarks
504 marks = repo._bookmarks
505 cur = repo.changectx('.').node()
505 cur = repo.changectx('.').node()
506
506
507 if rename:
507 if rename:
508 if rename not in marks:
508 if rename not in marks:
509 raise util.Abort(_("bookmark '%s' does not exist") % rename)
509 raise util.Abort(_("bookmark '%s' does not exist") % rename)
510 if mark in marks and not force:
510 if mark in marks and not force:
511 raise util.Abort(_("bookmark '%s' already exists "
511 raise util.Abort(_("bookmark '%s' already exists "
512 "(use -f to force)") % mark)
512 "(use -f to force)") % mark)
513 if mark is None:
513 if mark is None:
514 raise util.Abort(_("new bookmark name required"))
514 raise util.Abort(_("new bookmark name required"))
515 marks[mark] = marks[rename]
515 marks[mark] = marks[rename]
516 if repo._bookmarkcurrent == rename:
516 if repo._bookmarkcurrent == rename:
517 bookmarks.setcurrent(repo, mark)
517 bookmarks.setcurrent(repo, mark)
518 del marks[rename]
518 del marks[rename]
519 bookmarks.write(repo)
519 bookmarks.write(repo)
520 return
520 return
521
521
522 if delete:
522 if delete:
523 if mark is None:
523 if mark is None:
524 raise util.Abort(_("bookmark name required"))
524 raise util.Abort(_("bookmark name required"))
525 if mark not in marks:
525 if mark not in marks:
526 raise util.Abort(_("bookmark '%s' does not exist") % mark)
526 raise util.Abort(_("bookmark '%s' does not exist") % mark)
527 if mark == repo._bookmarkcurrent:
527 if mark == repo._bookmarkcurrent:
528 bookmarks.setcurrent(repo, None)
528 bookmarks.setcurrent(repo, None)
529 del marks[mark]
529 del marks[mark]
530 bookmarks.write(repo)
530 bookmarks.write(repo)
531 return
531 return
532
532
533 if mark is not None:
533 if mark is not None:
534 if "\n" in mark:
534 if "\n" in mark:
535 raise util.Abort(_("bookmark name cannot contain newlines"))
535 raise util.Abort(_("bookmark name cannot contain newlines"))
536 mark = mark.strip()
536 mark = mark.strip()
537 if not mark:
537 if not mark:
538 raise util.Abort(_("bookmark names cannot consist entirely of "
538 raise util.Abort(_("bookmark names cannot consist entirely of "
539 "whitespace"))
539 "whitespace"))
540 if mark in marks and not force:
540 if mark in marks and not force:
541 raise util.Abort(_("bookmark '%s' already exists "
541 raise util.Abort(_("bookmark '%s' already exists "
542 "(use -f to force)") % mark)
542 "(use -f to force)") % mark)
543 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
543 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
544 and not force):
544 and not force):
545 raise util.Abort(
545 raise util.Abort(
546 _("a bookmark cannot have the name of an existing branch"))
546 _("a bookmark cannot have the name of an existing branch"))
547 if rev:
547 if rev:
548 marks[mark] = repo.lookup(rev)
548 marks[mark] = repo.lookup(rev)
549 else:
549 else:
550 marks[mark] = repo.changectx('.').node()
550 marks[mark] = repo.changectx('.').node()
551 if repo.changectx('.').node() == marks[mark]:
551 if repo.changectx('.').node() == marks[mark]:
552 bookmarks.setcurrent(repo, mark)
552 bookmarks.setcurrent(repo, mark)
553 bookmarks.write(repo)
553 bookmarks.write(repo)
554 return
554 return
555
555
556 if mark is None:
556 if mark is None:
557 if rev:
557 if rev:
558 raise util.Abort(_("bookmark name required"))
558 raise util.Abort(_("bookmark name required"))
559 if len(marks) == 0:
559 if len(marks) == 0:
560 ui.status(_("no bookmarks set\n"))
560 ui.status(_("no bookmarks set\n"))
561 else:
561 else:
562 for bmark, n in sorted(marks.iteritems()):
562 for bmark, n in sorted(marks.iteritems()):
563 current = repo._bookmarkcurrent
563 current = repo._bookmarkcurrent
564 if bmark == current and n == cur:
564 if bmark == current and n == cur:
565 prefix, label = '*', 'bookmarks.current'
565 prefix, label = '*', 'bookmarks.current'
566 else:
566 else:
567 prefix, label = ' ', ''
567 prefix, label = ' ', ''
568
568
569 if ui.quiet:
569 if ui.quiet:
570 ui.write("%s\n" % bmark, label=label)
570 ui.write("%s\n" % bmark, label=label)
571 else:
571 else:
572 ui.write(" %s %-25s %d:%s\n" % (
572 ui.write(" %s %-25s %d:%s\n" % (
573 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
573 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
574 label=label)
574 label=label)
575 return
575 return
576
576
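# For illustration only: a minimal standalone sketch of the bookmark
# listing format used above (" %s %-25s %d:%s"), with hypothetical
# sample values in place of a real repository.  The '*' prefix marks
# the current bookmark, mirroring the prefix/label selection in the
# listing loop.
def _demo_bookmark_line(prefix, name, rev, shorthash):
    # prefix is '*' for the current bookmark, ' ' otherwise
    return " %s %-25s %d:%s" % (prefix, name, rev, shorthash)

if __name__ == '__main__':
    print _demo_bookmark_line('*', 'feature-x', 42, '9c2a4d1e0b77')
    print _demo_bookmark_line(' ', 'stable', 40, '1f0c6e5a2d88')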
577 def branch(ui, repo, label=None, **opts):
577 def branch(ui, repo, label=None, **opts):
578 """set or show the current branch name
578 """set or show the current branch name
579
579
580 With no argument, show the current branch name. With one argument,
580 With no argument, show the current branch name. With one argument,
581 set the working directory branch name (the branch will not exist
581 set the working directory branch name (the branch will not exist
582 in the repository until the next commit). Standard practice
582 in the repository until the next commit). Standard practice
583 recommends that primary development take place on the 'default'
583 recommends that primary development take place on the 'default'
584 branch.
584 branch.
585
585
586 Unless -f/--force is specified, branch will not let you set a
586 Unless -f/--force is specified, branch will not let you set a
587 branch name that already exists, even if it's inactive.
587 branch name that already exists, even if it's inactive.
588
588
589 Use -C/--clean to reset the working directory branch to that of
589 Use -C/--clean to reset the working directory branch to that of
590 the parent of the working directory, negating a previous branch
590 the parent of the working directory, negating a previous branch
591 change.
591 change.
592
592
593 Use the command :hg:`update` to switch to an existing branch. Use
593 Use the command :hg:`update` to switch to an existing branch. Use
594 :hg:`commit --close-branch` to mark this branch as closed.
594 :hg:`commit --close-branch` to mark this branch as closed.
595
595
596 Returns 0 on success.
596 Returns 0 on success.
597 """
597 """
598
598
599 if opts.get('clean'):
599 if opts.get('clean'):
600 label = repo[None].p1().branch()
600 label = repo[None].p1().branch()
601 repo.dirstate.setbranch(label)
601 repo.dirstate.setbranch(label)
602 ui.status(_('reset working directory to branch %s\n') % label)
602 ui.status(_('reset working directory to branch %s\n') % label)
603 elif label:
603 elif label:
604 if not opts.get('force') and label in repo.branchtags():
604 if not opts.get('force') and label in repo.branchtags():
605 if label not in [p.branch() for p in repo.parents()]:
605 if label not in [p.branch() for p in repo.parents()]:
606 raise util.Abort(_('a branch of the same name already exists'
606 raise util.Abort(_('a branch of the same name already exists'
607 " (use 'hg update' to switch to it)"))
607 " (use 'hg update' to switch to it)"))
608 repo.dirstate.setbranch(label)
608 repo.dirstate.setbranch(label)
609 ui.status(_('marked working directory as branch %s\n') % label)
609 ui.status(_('marked working directory as branch %s\n') % label)
610 else:
610 else:
611 ui.write("%s\n" % repo.dirstate.branch())
611 ui.write("%s\n" % repo.dirstate.branch())
612
612
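# For illustration only: a simplified standalone sketch of the branch
# name check performed above, using plain Python data instead of repo
# objects.  The existing branch names and parent branch names are
# hypothetical sample values.
def _demo_branch_allowed(label, existing, parentbranches, force=False):
    # refuse to reuse an existing branch name unless --force is given
    # or a working directory parent is already on that branch
    if not force and label in existing:
        if label not in parentbranches:
            return False
    return True

if __name__ == '__main__':
    print _demo_branch_allowed('stable', ['default', 'stable'], ['default'])  # False
    print _demo_branch_allowed('stable', ['default', 'stable'], ['stable'])   # True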
613 def branches(ui, repo, active=False, closed=False):
613 def branches(ui, repo, active=False, closed=False):
614 """list repository named branches
614 """list repository named branches
615
615
616 List the repository's named branches, indicating which ones are
616 List the repository's named branches, indicating which ones are
617 inactive. If -c/--closed is specified, also list branches which have
617 inactive. If -c/--closed is specified, also list branches which have
618 been marked closed (see :hg:`commit --close-branch`).
618 been marked closed (see :hg:`commit --close-branch`).
619
619
620 If -a/--active is specified, only show active branches. A branch
620 If -a/--active is specified, only show active branches. A branch
621 is considered active if it contains repository heads.
621 is considered active if it contains repository heads.
622
622
623 Use the command :hg:`update` to switch to an existing branch.
623 Use the command :hg:`update` to switch to an existing branch.
624
624
625 Returns 0.
625 Returns 0.
626 """
626 """
627
627
628 hexfunc = ui.debugflag and hex or short
628 hexfunc = ui.debugflag and hex or short
629 activebranches = [repo[n].branch() for n in repo.heads()]
629 activebranches = [repo[n].branch() for n in repo.heads()]
630 def testactive(tag, node):
630 def testactive(tag, node):
631 realhead = tag in activebranches
631 realhead = tag in activebranches
632 open = node in repo.branchheads(tag, closed=False)
632 open = node in repo.branchheads(tag, closed=False)
633 return realhead and open
633 return realhead and open
634 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
634 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
635 for tag, node in repo.branchtags().items()],
635 for tag, node in repo.branchtags().items()],
636 reverse=True)
636 reverse=True)
637
637
638 for isactive, node, tag in branches:
638 for isactive, node, tag in branches:
639 if (not active) or isactive:
639 if (not active) or isactive:
640 if ui.quiet:
640 if ui.quiet:
641 ui.write("%s\n" % tag)
641 ui.write("%s\n" % tag)
642 else:
642 else:
643 hn = repo.lookup(node)
643 hn = repo.lookup(node)
644 if isactive:
644 if isactive:
645 label = 'branches.active'
645 label = 'branches.active'
646 notice = ''
646 notice = ''
647 elif hn not in repo.branchheads(tag, closed=False):
647 elif hn not in repo.branchheads(tag, closed=False):
648 if not closed:
648 if not closed:
649 continue
649 continue
650 label = 'branches.closed'
650 label = 'branches.closed'
651 notice = _(' (closed)')
651 notice = _(' (closed)')
652 else:
652 else:
653 label = 'branches.inactive'
653 label = 'branches.inactive'
654 notice = _(' (inactive)')
654 notice = _(' (inactive)')
655 if tag == repo.dirstate.branch():
655 if tag == repo.dirstate.branch():
656 label = 'branches.current'
656 label = 'branches.current'
657 rev = str(node).rjust(31 - encoding.colwidth(tag))
657 rev = str(node).rjust(31 - encoding.colwidth(tag))
658 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
658 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
659 tag = ui.label(tag, label)
659 tag = ui.label(tag, label)
660 ui.write("%s %s%s\n" % (tag, rev, notice))
660 ui.write("%s %s%s\n" % (tag, rev, notice))
661
661
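# For illustration only: a rough standalone sketch of the column
# alignment used by the branch listing above.  encoding.colwidth() is
# approximated with len(), which is only correct for ASCII branch
# names; the sample values are hypothetical.
def _demo_branch_line(tag, rev, shorthash, notice=''):
    # right-justify the rev number so the rev:hash column lines up
    revcol = str(rev).rjust(31 - len(tag))
    return "%s %s:%s%s" % (tag, revcol, shorthash, notice)

if __name__ == '__main__':
    print _demo_branch_line('default', 120, '4be0e32c8a71')
    print _demo_branch_line('stable', 118, '0a3f9b7d6c52', ' (inactive)')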
662 def bundle(ui, repo, fname, dest=None, **opts):
662 def bundle(ui, repo, fname, dest=None, **opts):
663 """create a changegroup file
663 """create a changegroup file
664
664
665 Generate a compressed changegroup file collecting changesets not
665 Generate a compressed changegroup file collecting changesets not
666 known to be in another repository.
666 known to be in another repository.
667
667
668 If you omit the destination repository, then hg assumes the
668 If you omit the destination repository, then hg assumes the
669 destination will have all the nodes you specify with --base
669 destination will have all the nodes you specify with --base
670 parameters. To create a bundle containing all changesets, use
670 parameters. To create a bundle containing all changesets, use
671 -a/--all (or --base null).
671 -a/--all (or --base null).
672
672
673 You can change compression method with the -t/--type option.
673 You can change compression method with the -t/--type option.
674 The available compression methods are: none, bzip2, and
674 The available compression methods are: none, bzip2, and
675 gzip (by default, bundles are compressed using bzip2).
675 gzip (by default, bundles are compressed using bzip2).
676
676
677 The bundle file can then be transferred using conventional means
677 The bundle file can then be transferred using conventional means
678 and applied to another repository with the unbundle or pull
678 and applied to another repository with the unbundle or pull
679 command. This is useful when direct push and pull are not
679 command. This is useful when direct push and pull are not
680 available or when exporting an entire repository is undesirable.
680 available or when exporting an entire repository is undesirable.
681
681
682 Applying bundles preserves all changeset contents including
682 Applying bundles preserves all changeset contents including
683 permissions, copy/rename information, and revision history.
683 permissions, copy/rename information, and revision history.
684
684
685 Returns 0 on success, 1 if no changes found.
685 Returns 0 on success, 1 if no changes found.
686 """
686 """
687 revs = None
687 revs = None
688 if 'rev' in opts:
688 if 'rev' in opts:
689 revs = cmdutil.revrange(repo, opts['rev'])
689 revs = cmdutil.revrange(repo, opts['rev'])
690
690
691 if opts.get('all'):
691 if opts.get('all'):
692 base = ['null']
692 base = ['null']
693 else:
693 else:
694 base = cmdutil.revrange(repo, opts.get('base'))
694 base = cmdutil.revrange(repo, opts.get('base'))
695 if base:
695 if base:
696 if dest:
696 if dest:
697 raise util.Abort(_("--base is incompatible with specifying "
697 raise util.Abort(_("--base is incompatible with specifying "
698 "a destination"))
698 "a destination"))
699 common = [repo.lookup(rev) for rev in base]
699 common = [repo.lookup(rev) for rev in base]
700 else:
700 else:
701 dest = ui.expandpath(dest or 'default-push', dest or 'default')
701 dest = ui.expandpath(dest or 'default-push', dest or 'default')
702 dest, branches = hg.parseurl(dest, opts.get('branch'))
702 dest, branches = hg.parseurl(dest, opts.get('branch'))
703 other = hg.repository(hg.remoteui(repo, opts), dest)
703 other = hg.repository(hg.remoteui(repo, opts), dest)
704 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
704 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
705 inc = discovery.findcommonincoming(repo, other, force=opts.get('force'))
705 inc = discovery.findcommonincoming(repo, other, force=opts.get('force'))
706 common, _anyinc, _heads = inc
706 common, _anyinc, _heads = inc
707
707
708 nodes = revs and map(repo.lookup, revs) or revs
708 nodes = revs and map(repo.lookup, revs) or revs
709 cg = repo.getbundle('bundle', common=common, heads=nodes)
709 cg = repo.getbundle('bundle', common=common, heads=nodes)
710 if not cg:
710 if not cg:
711 ui.status(_("no changes found\n"))
711 ui.status(_("no changes found\n"))
712 return 1
712 return 1
713
713
714 bundletype = opts.get('type', 'bzip2').lower()
714 bundletype = opts.get('type', 'bzip2').lower()
715 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
715 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
716 bundletype = btypes.get(bundletype)
716 bundletype = btypes.get(bundletype)
717 if bundletype not in changegroup.bundletypes:
717 if bundletype not in changegroup.bundletypes:
718 raise util.Abort(_('unknown bundle type specified with --type'))
718 raise util.Abort(_('unknown bundle type specified with --type'))
719
719
720 changegroup.writebundle(cg, fname, bundletype)
720 changegroup.writebundle(cg, fname, bundletype)
721
721
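# For illustration only: a standalone sketch of how the -t/--type
# option described above is normalised to an internal bundle header.
# The mapping mirrors the btypes table in bundle(); the helper name is
# hypothetical.
def _demo_bundletype(opt=None):
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    name = (opt or 'bzip2').lower()
    if name not in btypes:
        raise ValueError('unknown bundle type specified with --type')
    return btypes[name]

if __name__ == '__main__':
    print _demo_bundletype()        # HG10BZ (bzip2 is the default)
    print _demo_bundletype('gzip')  # HG10GZ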
722 def cat(ui, repo, file1, *pats, **opts):
722 def cat(ui, repo, file1, *pats, **opts):
723 """output the current or given revision of files
723 """output the current or given revision of files
724
724
725 Print the specified files as they were at the given revision. If
725 Print the specified files as they were at the given revision. If
726 no revision is given, the parent of the working directory is used,
726 no revision is given, the parent of the working directory is used,
727 or tip if no revision is checked out.
727 or tip if no revision is checked out.
728
728
729 Output may be to a file, in which case the name of the file is
729 Output may be to a file, in which case the name of the file is
730 given using a format string. The formatting rules are the same as
730 given using a format string. The formatting rules are the same as
731 for the export command, with the following additions:
731 for the export command, with the following additions:
732
732
733 :``%s``: basename of file being printed
733 :``%s``: basename of file being printed
734 :``%d``: dirname of file being printed, or '.' if in repository root
734 :``%d``: dirname of file being printed, or '.' if in repository root
735 :``%p``: root-relative path name of file being printed
735 :``%p``: root-relative path name of file being printed
736
736
737 Returns 0 on success.
737 Returns 0 on success.
738 """
738 """
739 ctx = cmdutil.revsingle(repo, opts.get('rev'))
739 ctx = cmdutil.revsingle(repo, opts.get('rev'))
740 err = 1
740 err = 1
741 m = cmdutil.match(repo, (file1,) + pats, opts)
741 m = cmdutil.match(repo, (file1,) + pats, opts)
742 for abs in ctx.walk(m):
742 for abs in ctx.walk(m):
743 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
743 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
744 data = ctx[abs].data()
744 data = ctx[abs].data()
745 if opts.get('decode'):
745 if opts.get('decode'):
746 data = repo.wwritedata(abs, data)
746 data = repo.wwritedata(abs, data)
747 fp.write(data)
747 fp.write(data)
748 fp.close()
748 fp.close()
749 err = 0
749 err = 0
750 return err
750 return err
751
751
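# For illustration only: a minimal sketch of the output format
# substitutions documented for cat above (%s, %d, %p).  It covers only
# those three keys with a hypothetical file path; the real expansion
# is done by cmdutil.make_file and handles more.
import os

def _demo_cat_output_name(fmt, repopath):
    basename = os.path.basename(repopath)
    dirname = os.path.dirname(repopath) or '.'
    out = fmt
    for key, value in {'%s': basename, '%d': dirname, '%p': repopath}.items():
        out = out.replace(key, value)
    return out

if __name__ == '__main__':
    print _demo_cat_output_name('%d/%s.orig', 'lib/util.py')  # lib/util.py.orig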
752 def clone(ui, source, dest=None, **opts):
752 def clone(ui, source, dest=None, **opts):
753 """make a copy of an existing repository
753 """make a copy of an existing repository
754
754
755 Create a copy of an existing repository in a new directory.
755 Create a copy of an existing repository in a new directory.
756
756
757 If no destination directory name is specified, it defaults to the
757 If no destination directory name is specified, it defaults to the
758 basename of the source.
758 basename of the source.
759
759
760 The location of the source is added to the new repository's
760 The location of the source is added to the new repository's
761 ``.hg/hgrc`` file, as the default to be used for future pulls.
761 ``.hg/hgrc`` file, as the default to be used for future pulls.
762
762
763 See :hg:`help urls` for valid source format details.
763 See :hg:`help urls` for valid source format details.
764
764
765 It is possible to specify an ``ssh://`` URL as the destination, but no
765 It is possible to specify an ``ssh://`` URL as the destination, but no
766 ``.hg/hgrc`` and working directory will be created on the remote side.
766 ``.hg/hgrc`` and working directory will be created on the remote side.
767 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
767 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
768
768
769 A set of changesets (tags, or branch names) to pull may be specified
769 A set of changesets (tags, or branch names) to pull may be specified
770 by listing each changeset (tag, or branch name) with -r/--rev.
770 by listing each changeset (tag, or branch name) with -r/--rev.
771 If -r/--rev is used, the cloned repository will contain only a subset
771 If -r/--rev is used, the cloned repository will contain only a subset
772 of the changesets of the source repository. Only the set of changesets
772 of the changesets of the source repository. Only the set of changesets
773 defined by all -r/--rev options (including all their ancestors)
773 defined by all -r/--rev options (including all their ancestors)
774 will be pulled into the destination repository.
774 will be pulled into the destination repository.
775 No subsequent changesets (including subsequent tags) will be present
775 No subsequent changesets (including subsequent tags) will be present
776 in the destination.
776 in the destination.
777
777
778 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
778 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
779 local source repositories.
779 local source repositories.
780
780
781 For efficiency, hardlinks are used for cloning whenever the source
781 For efficiency, hardlinks are used for cloning whenever the source
782 and destination are on the same filesystem (note this applies only
782 and destination are on the same filesystem (note this applies only
783 to the repository data, not to the working directory). Some
783 to the repository data, not to the working directory). Some
784 filesystems, such as AFS, implement hardlinking incorrectly, but
784 filesystems, such as AFS, implement hardlinking incorrectly, but
785 do not report errors. In these cases, use the --pull option to
785 do not report errors. In these cases, use the --pull option to
786 avoid hardlinking.
786 avoid hardlinking.
787
787
788 In some cases, you can clone repositories and the working directory
788 In some cases, you can clone repositories and the working directory
789 using full hardlinks with ::
789 using full hardlinks with ::
790
790
791 $ cp -al REPO REPOCLONE
791 $ cp -al REPO REPOCLONE
792
792
793 This is the fastest way to clone, but it is not always safe. The
793 This is the fastest way to clone, but it is not always safe. The
794 operation is not atomic (making sure REPO is not modified during
794 operation is not atomic (making sure REPO is not modified during
795 the operation is up to you) and you have to make sure your editor
795 the operation is up to you) and you have to make sure your editor
796 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
796 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
797 this is not compatible with certain extensions that place their
797 this is not compatible with certain extensions that place their
798 metadata under the .hg directory, such as mq.
798 metadata under the .hg directory, such as mq.
799
799
800 Mercurial will update the working directory to the first applicable
800 Mercurial will update the working directory to the first applicable
801 revision from this list:
801 revision from this list:
802
802
803 a) null if -U or the source repository has no changesets
803 a) null if -U or the source repository has no changesets
804 b) if -u . and the source repository is local, the first parent of
804 b) if -u . and the source repository is local, the first parent of
805 the source repository's working directory
805 the source repository's working directory
806 c) the changeset specified with -u (if a branch name, this means the
806 c) the changeset specified with -u (if a branch name, this means the
807 latest head of that branch)
807 latest head of that branch)
808 d) the changeset specified with -r
808 d) the changeset specified with -r
809 e) the tipmost head specified with -b
809 e) the tipmost head specified with -b
810 f) the tipmost head specified with the url#branch source syntax
810 f) the tipmost head specified with the url#branch source syntax
811 g) the tipmost head of the default branch
811 g) the tipmost head of the default branch
812 h) tip
812 h) tip
813
813
814 Returns 0 on success.
814 Returns 0 on success.
815 """
815 """
816 if opts.get('noupdate') and opts.get('updaterev'):
816 if opts.get('noupdate') and opts.get('updaterev'):
817 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
817 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
818
818
819 r = hg.clone(hg.remoteui(ui, opts), source, dest,
819 r = hg.clone(hg.remoteui(ui, opts), source, dest,
820 pull=opts.get('pull'),
820 pull=opts.get('pull'),
821 stream=opts.get('uncompressed'),
821 stream=opts.get('uncompressed'),
822 rev=opts.get('rev'),
822 rev=opts.get('rev'),
823 update=opts.get('updaterev') or not opts.get('noupdate'),
823 update=opts.get('updaterev') or not opts.get('noupdate'),
824 branch=opts.get('branch'))
824 branch=opts.get('branch'))
825
825
826 return r is None
826 return r is None
827
827
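# For illustration only: a small standalone sketch of how clone()
# combines --updaterev and --noupdate into the single `update`
# argument passed to hg.clone() above; opts is a plain dict with
# hypothetical values.
def _demo_clone_update(opts):
    if opts.get('noupdate') and opts.get('updaterev'):
        raise ValueError('cannot specify both --noupdate and --updaterev')
    # a requested revision wins; otherwise update unless --noupdate
    return opts.get('updaterev') or not opts.get('noupdate')

if __name__ == '__main__':
    print _demo_clone_update({})                       # True (update to the default target)
    print _demo_clone_update({'noupdate': True})       # False (leave the working dir empty)
    print _demo_clone_update({'updaterev': 'stable'})  # 'stable'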
828 def commit(ui, repo, *pats, **opts):
828 def commit(ui, repo, *pats, **opts):
829 """commit the specified files or all outstanding changes
829 """commit the specified files or all outstanding changes
830
830
831 Commit changes to the given files into the repository. Unlike a
831 Commit changes to the given files into the repository. Unlike a
832 centralized SCM, this operation is a local operation. See
832 centralized SCM, this operation is a local operation. See
833 :hg:`push` for a way to actively distribute your changes.
833 :hg:`push` for a way to actively distribute your changes.
834
834
835 If a list of files is omitted, all changes reported by :hg:`status`
835 If a list of files is omitted, all changes reported by :hg:`status`
836 will be committed.
836 will be committed.
837
837
838 If you are committing the result of a merge, do not provide any
838 If you are committing the result of a merge, do not provide any
839 filenames or -I/-X filters.
839 filenames or -I/-X filters.
840
840
841 If no commit message is specified, Mercurial starts your
841 If no commit message is specified, Mercurial starts your
842 configured editor where you can enter a message. In case your
842 configured editor where you can enter a message. In case your
843 commit fails, you will find a backup of your message in
843 commit fails, you will find a backup of your message in
844 ``.hg/last-message.txt``.
844 ``.hg/last-message.txt``.
845
845
846 See :hg:`help dates` for a list of formats valid for -d/--date.
846 See :hg:`help dates` for a list of formats valid for -d/--date.
847
847
848 Returns 0 on success, 1 if nothing changed.
848 Returns 0 on success, 1 if nothing changed.
849 """
849 """
850 extra = {}
850 extra = {}
851 if opts.get('close_branch'):
851 if opts.get('close_branch'):
852 if repo['.'].node() not in repo.branchheads():
852 if repo['.'].node() not in repo.branchheads():
853 # The topo heads set is included in the branch heads set of the
853 # The topo heads set is included in the branch heads set of the
854 # current branch, so it's sufficient to test branchheads
854 # current branch, so it's sufficient to test branchheads
855 raise util.Abort(_('can only close branch heads'))
855 raise util.Abort(_('can only close branch heads'))
856 extra['close'] = 1
856 extra['close'] = 1
857 e = cmdutil.commiteditor
857 e = cmdutil.commiteditor
858 if opts.get('force_editor'):
858 if opts.get('force_editor'):
859 e = cmdutil.commitforceeditor
859 e = cmdutil.commitforceeditor
860
860
861 def commitfunc(ui, repo, message, match, opts):
861 def commitfunc(ui, repo, message, match, opts):
862 return repo.commit(message, opts.get('user'), opts.get('date'), match,
862 return repo.commit(message, opts.get('user'), opts.get('date'), match,
863 editor=e, extra=extra)
863 editor=e, extra=extra)
864
864
865 branch = repo[None].branch()
865 branch = repo[None].branch()
866 bheads = repo.branchheads(branch)
866 bheads = repo.branchheads(branch)
867
867
868 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
868 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
869 if not node:
869 if not node:
870 stat = repo.status(match=cmdutil.match(repo, pats, opts))
870 stat = repo.status(match=cmdutil.match(repo, pats, opts))
871 if stat[3]:
871 if stat[3]:
872 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
872 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
873 % len(stat[3]))
873 % len(stat[3]))
874 else:
874 else:
875 ui.status(_("nothing changed\n"))
875 ui.status(_("nothing changed\n"))
876 return 1
876 return 1
877
877
878 ctx = repo[node]
878 ctx = repo[node]
879 parents = ctx.parents()
879 parents = ctx.parents()
880
880
881 if bheads and not [x for x in parents
881 if bheads and not [x for x in parents
882 if x.node() in bheads and x.branch() == branch]:
882 if x.node() in bheads and x.branch() == branch]:
883 ui.status(_('created new head\n'))
883 ui.status(_('created new head\n'))
884 # The message is not printed for initial roots. For the other
884 # The message is not printed for initial roots. For the other
885 # changesets, it is printed in the following situations:
885 # changesets, it is printed in the following situations:
886 #
886 #
887 # Par column: for the 2 parents with ...
887 # Par column: for the 2 parents with ...
888 # N: null or no parent
888 # N: null or no parent
889 # B: parent is on another named branch
889 # B: parent is on another named branch
890 # C: parent is a regular non head changeset
890 # C: parent is a regular non head changeset
891 # H: parent was a branch head of the current branch
891 # H: parent was a branch head of the current branch
892 # Msg column: whether we print "created new head" message
892 # Msg column: whether we print "created new head" message
893 # In the following, it is assumed that there already exists some
893 # In the following, it is assumed that there already exists some
894 # initial branch heads of the current branch, otherwise nothing is
894 # initial branch heads of the current branch, otherwise nothing is
895 # printed anyway.
895 # printed anyway.
896 #
896 #
897 # Par Msg Comment
897 # Par Msg Comment
898 # NN y additional topo root
898 # NN y additional topo root
899 #
899 #
900 # BN y additional branch root
900 # BN y additional branch root
901 # CN y additional topo head
901 # CN y additional topo head
902 # HN n usual case
902 # HN n usual case
903 #
903 #
904 # BB y weird additional branch root
904 # BB y weird additional branch root
905 # CB y branch merge
905 # CB y branch merge
906 # HB n merge with named branch
906 # HB n merge with named branch
907 #
907 #
908 # CC y additional head from merge
908 # CC y additional head from merge
909 # CH n merge with a head
909 # CH n merge with a head
910 #
910 #
911 # HH n head merge: head count decreases
911 # HH n head merge: head count decreases
912
912
913 if not opts.get('close_branch'):
913 if not opts.get('close_branch'):
914 for r in parents:
914 for r in parents:
915 if r.extra().get('close') and r.branch() == branch:
915 if r.extra().get('close') and r.branch() == branch:
916 ui.status(_('reopening closed branch head %d\n') % r)
916 ui.status(_('reopening closed branch head %d\n') % r)
917
917
918 if ui.debugflag:
918 if ui.debugflag:
919 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
919 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
920 elif ui.verbose:
920 elif ui.verbose:
921 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
921 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
922
922
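# For illustration only: a simplified standalone sketch of the
# "created new head" test above.  Branch heads and the new commit's
# parents are represented as plain (node, branch) tuples with
# hypothetical values; the real check uses changectx objects.
def _demo_created_new_head(bheads, parents, branch):
    # a new head is reported when no parent of the new changeset was a
    # head of the current branch before the commit
    return bool(bheads) and not [p for p in parents
                                 if p[0] in bheads and p[1] == branch]

if __name__ == '__main__':
    heads = ['n1', 'n2']
    print _demo_created_new_head(heads, [('n1', 'default')], 'default')  # False: usual case
    print _demo_created_new_head(heads, [('n0', 'default')], 'default')  # True: extra head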
923 def copy(ui, repo, *pats, **opts):
923 def copy(ui, repo, *pats, **opts):
924 """mark files as copied for the next commit
924 """mark files as copied for the next commit
925
925
926 Mark dest as having copies of source files. If dest is a
926 Mark dest as having copies of source files. If dest is a
927 directory, copies are put in that directory. If dest is a file,
927 directory, copies are put in that directory. If dest is a file,
928 the source must be a single file.
928 the source must be a single file.
929
929
930 By default, this command copies the contents of files as they
930 By default, this command copies the contents of files as they
931 exist in the working directory. If invoked with -A/--after, the
931 exist in the working directory. If invoked with -A/--after, the
932 operation is recorded, but no copying is performed.
932 operation is recorded, but no copying is performed.
933
933
934 This command takes effect with the next commit. To undo a copy
934 This command takes effect with the next commit. To undo a copy
935 before that, see :hg:`revert`.
935 before that, see :hg:`revert`.
936
936
937 Returns 0 on success, 1 if errors are encountered.
937 Returns 0 on success, 1 if errors are encountered.
938 """
938 """
939 wlock = repo.wlock(False)
939 wlock = repo.wlock(False)
940 try:
940 try:
941 return cmdutil.copy(ui, repo, pats, opts)
941 return cmdutil.copy(ui, repo, pats, opts)
942 finally:
942 finally:
943 wlock.release()
943 wlock.release()
944
944
945 def debugancestor(ui, repo, *args):
945 def debugancestor(ui, repo, *args):
946 """find the ancestor revision of two revisions in a given index"""
946 """find the ancestor revision of two revisions in a given index"""
947 if len(args) == 3:
947 if len(args) == 3:
948 index, rev1, rev2 = args
948 index, rev1, rev2 = args
949 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
949 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
950 lookup = r.lookup
950 lookup = r.lookup
951 elif len(args) == 2:
951 elif len(args) == 2:
952 if not repo:
952 if not repo:
953 raise util.Abort(_("there is no Mercurial repository here "
953 raise util.Abort(_("there is no Mercurial repository here "
954 "(.hg not found)"))
954 "(.hg not found)"))
955 rev1, rev2 = args
955 rev1, rev2 = args
956 r = repo.changelog
956 r = repo.changelog
957 lookup = repo.lookup
957 lookup = repo.lookup
958 else:
958 else:
959 raise util.Abort(_('either two or three arguments required'))
959 raise util.Abort(_('either two or three arguments required'))
960 a = r.ancestor(lookup(rev1), lookup(rev2))
960 a = r.ancestor(lookup(rev1), lookup(rev2))
961 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
961 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
962
962
963 def debugbuilddag(ui, repo, text,
963 def debugbuilddag(ui, repo, text,
964 mergeable_file=False,
964 mergeable_file=False,
965 appended_file=False,
965 appended_file=False,
966 overwritten_file=False,
966 overwritten_file=False,
967 new_file=False):
967 new_file=False):
968 """builds a repo with a given dag from scratch in the current empty repo
968 """builds a repo with a given dag from scratch in the current empty repo
969
969
970 Elements:
970 Elements:
971
971
972 - "+n" is a linear run of n nodes based on the current default parent
972 - "+n" is a linear run of n nodes based on the current default parent
973 - "." is a single node based on the current default parent
973 - "." is a single node based on the current default parent
974 - "$" resets the default parent to null (implied at the start);
974 - "$" resets the default parent to null (implied at the start);
975 otherwise the default parent is always the last node created
975 otherwise the default parent is always the last node created
976 - "<p" sets the default parent to the backref p
976 - "<p" sets the default parent to the backref p
977 - "*p" is a fork at parent p, which is a backref
977 - "*p" is a fork at parent p, which is a backref
978 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
978 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
979 - "/p2" is a merge of the preceding node and p2
979 - "/p2" is a merge of the preceding node and p2
980 - ":tag" defines a local tag for the preceding node
980 - ":tag" defines a local tag for the preceding node
981 - "@branch" sets the named branch for subsequent nodes
981 - "@branch" sets the named branch for subsequent nodes
982 - "!command" runs the command using your shell
982 - "!command" runs the command using your shell
983 - "!!my command\\n" is like "!", but to the end of the line
983 - "!!my command\\n" is like "!", but to the end of the line
984 - "#...\\n" is a comment up to the end of the line
984 - "#...\\n" is a comment up to the end of the line
985
985
986 Whitespace between the above elements is ignored.
986 Whitespace between the above elements is ignored.
987
987
988 A backref is either
988 A backref is either
989
989
990 - a number n, which references the node curr-n, where curr is the current
990 - a number n, which references the node curr-n, where curr is the current
991 node, or
991 node, or
992 - the name of a local tag you placed earlier using ":tag", or
992 - the name of a local tag you placed earlier using ":tag", or
993 - empty to denote the default parent.
993 - empty to denote the default parent.
994
994
995 All string-valued elements are either strictly alphanumeric, or must
995 All string-valued elements are either strictly alphanumeric, or must
996 be enclosed in double quotes ("..."), with "\\" as escape character.
996 be enclosed in double quotes ("..."), with "\\" as escape character.
997
997
998 Note that the --overwritten-file and --appended-file options imply the
998 Note that the --overwritten-file and --appended-file options imply the
999 use of "HGMERGE=internal:local" during DAG buildup.
999 use of "HGMERGE=internal:local" during DAG buildup.
1000 """
1000 """
1001
1001
1002 if not (mergeable_file or appended_file or overwritten_file or new_file):
1002 if not (mergeable_file or appended_file or overwritten_file or new_file):
1003 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1003 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1004
1004
1005 if len(repo.changelog) > 0:
1005 if len(repo.changelog) > 0:
1006 raise util.Abort(_('repository is not empty'))
1006 raise util.Abort(_('repository is not empty'))
1007
1007
1008 if overwritten_file or appended_file:
1008 if overwritten_file or appended_file:
1009 # we don't want to fail in merges during buildup
1009 # we don't want to fail in merges during buildup
1010 os.environ['HGMERGE'] = 'internal:local'
1010 os.environ['HGMERGE'] = 'internal:local'
1011
1011
1012 def writefile(fname, text, fmode="wb"):
1012 def writefile(fname, text, fmode="wb"):
1013 f = open(fname, fmode)
1013 f = open(fname, fmode)
1014 try:
1014 try:
1015 f.write(text)
1015 f.write(text)
1016 finally:
1016 finally:
1017 f.close()
1017 f.close()
1018
1018
1019 if mergeable_file:
1019 if mergeable_file:
1020 linesperrev = 2
1020 linesperrev = 2
1021 # determine number of revs in DAG
1021 # determine number of revs in DAG
1022 n = 0
1022 n = 0
1023 for type, data in dagparser.parsedag(text):
1023 for type, data in dagparser.parsedag(text):
1024 if type == 'n':
1024 if type == 'n':
1025 n += 1
1025 n += 1
1026 # make a file with k lines per rev
1026 # make a file with k lines per rev
1027 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1027 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1028 + "\n")
1028 + "\n")
1029
1029
1030 at = -1
1030 at = -1
1031 atbranch = 'default'
1031 atbranch = 'default'
1032 for type, data in dagparser.parsedag(text):
1032 for type, data in dagparser.parsedag(text):
1033 if type == 'n':
1033 if type == 'n':
1034 ui.status('node %s\n' % str(data))
1034 ui.status('node %s\n' % str(data))
1035 id, ps = data
1035 id, ps = data
1036 p1 = ps[0]
1036 p1 = ps[0]
1037 if p1 != at:
1037 if p1 != at:
1038 update(ui, repo, node=str(p1), clean=True)
1038 update(ui, repo, node=str(p1), clean=True)
1039 at = p1
1039 at = p1
1040 if repo.dirstate.branch() != atbranch:
1040 if repo.dirstate.branch() != atbranch:
1041 branch(ui, repo, atbranch, force=True)
1041 branch(ui, repo, atbranch, force=True)
1042 if len(ps) > 1:
1042 if len(ps) > 1:
1043 p2 = ps[1]
1043 p2 = ps[1]
1044 merge(ui, repo, node=p2)
1044 merge(ui, repo, node=p2)
1045
1045
1046 if mergeable_file:
1046 if mergeable_file:
1047 f = open("mf", "rb+")
1047 f = open("mf", "rb+")
1048 try:
1048 try:
1049 lines = f.read().split("\n")
1049 lines = f.read().split("\n")
1050 lines[id * linesperrev] += " r%i" % id
1050 lines[id * linesperrev] += " r%i" % id
1051 f.seek(0)
1051 f.seek(0)
1052 f.write("\n".join(lines))
1052 f.write("\n".join(lines))
1053 finally:
1053 finally:
1054 f.close()
1054 f.close()
1055
1055
1056 if appended_file:
1056 if appended_file:
1057 writefile("af", "r%i\n" % id, "ab")
1057 writefile("af", "r%i\n" % id, "ab")
1058
1058
1059 if overwritten_file:
1059 if overwritten_file:
1060 writefile("of", "r%i\n" % id)
1060 writefile("of", "r%i\n" % id)
1061
1061
1062 if new_file:
1062 if new_file:
1063 writefile("nf%i" % id, "r%i\n" % id)
1063 writefile("nf%i" % id, "r%i\n" % id)
1064
1064
1065 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1065 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1066 at = id
1066 at = id
1067 elif type == 'l':
1067 elif type == 'l':
1068 id, name = data
1068 id, name = data
1069 ui.status('tag %s\n' % name)
1069 ui.status('tag %s\n' % name)
1070 tag(ui, repo, name, local=True)
1070 tag(ui, repo, name, local=True)
1071 elif type == 'a':
1071 elif type == 'a':
1072 ui.status('branch %s\n' % data)
1072 ui.status('branch %s\n' % data)
1073 atbranch = data
1073 atbranch = data
1074 elif type in 'cC':
1074 elif type in 'cC':
1075 r = util.system(data, cwd=repo.root)
1075 r = util.system(data, cwd=repo.root)
1076 if r:
1076 if r:
1077 desc, r = util.explain_exit(r)
1077 desc, r = util.explain_exit(r)
1078 raise util.Abort(_('%s command %s') % (data, desc))
1078 raise util.Abort(_('%s command %s') % (data, desc))
1079
1079
1080 def debugcommands(ui, cmd='', *args):
1080 def debugcommands(ui, cmd='', *args):
1081 """list all available commands and options"""
1081 """list all available commands and options"""
1082 for cmd, vals in sorted(table.iteritems()):
1082 for cmd, vals in sorted(table.iteritems()):
1083 cmd = cmd.split('|')[0].strip('^')
1083 cmd = cmd.split('|')[0].strip('^')
1084 opts = ', '.join([i[1] for i in vals[1]])
1084 opts = ', '.join([i[1] for i in vals[1]])
1085 ui.write('%s: %s\n' % (cmd, opts))
1085 ui.write('%s: %s\n' % (cmd, opts))
1086
1086
1087 def debugcomplete(ui, cmd='', **opts):
1087 def debugcomplete(ui, cmd='', **opts):
1088 """returns the completion list associated with the given command"""
1088 """returns the completion list associated with the given command"""
1089
1089
1090 if opts.get('options'):
1090 if opts.get('options'):
1091 options = []
1091 options = []
1092 otables = [globalopts]
1092 otables = [globalopts]
1093 if cmd:
1093 if cmd:
1094 aliases, entry = cmdutil.findcmd(cmd, table, False)
1094 aliases, entry = cmdutil.findcmd(cmd, table, False)
1095 otables.append(entry[1])
1095 otables.append(entry[1])
1096 for t in otables:
1096 for t in otables:
1097 for o in t:
1097 for o in t:
1098 if "(DEPRECATED)" in o[3]:
1098 if "(DEPRECATED)" in o[3]:
1099 continue
1099 continue
1100 if o[0]:
1100 if o[0]:
1101 options.append('-%s' % o[0])
1101 options.append('-%s' % o[0])
1102 options.append('--%s' % o[1])
1102 options.append('--%s' % o[1])
1103 ui.write("%s\n" % "\n".join(options))
1103 ui.write("%s\n" % "\n".join(options))
1104 return
1104 return
1105
1105
1106 cmdlist = cmdutil.findpossible(cmd, table)
1106 cmdlist = cmdutil.findpossible(cmd, table)
1107 if ui.verbose:
1107 if ui.verbose:
1108 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1108 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1109 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1109 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1110
1110
1111 def debugfsinfo(ui, path = "."):
1111 def debugfsinfo(ui, path = "."):
1112 """show information detected about current filesystem"""
1112 """show information detected about current filesystem"""
1113 open('.debugfsinfo', 'w').write('')
1113 open('.debugfsinfo', 'w').write('')
1114 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1114 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1115 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1115 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1116 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1116 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1117 and 'yes' or 'no'))
1117 and 'yes' or 'no'))
1118 os.unlink('.debugfsinfo')
1118 os.unlink('.debugfsinfo')
1119
1119
1120 def debugrebuildstate(ui, repo, rev="tip"):
1120 def debugrebuildstate(ui, repo, rev="tip"):
1121 """rebuild the dirstate as it would look like for the given revision"""
1121 """rebuild the dirstate as it would look like for the given revision"""
1122 ctx = cmdutil.revsingle(repo, rev)
1122 ctx = cmdutil.revsingle(repo, rev)
1123 wlock = repo.wlock()
1123 wlock = repo.wlock()
1124 try:
1124 try:
1125 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1125 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1126 finally:
1126 finally:
1127 wlock.release()
1127 wlock.release()
1128
1128
1129 def debugcheckstate(ui, repo):
1129 def debugcheckstate(ui, repo):
1130 """validate the correctness of the current dirstate"""
1130 """validate the correctness of the current dirstate"""
1131 parent1, parent2 = repo.dirstate.parents()
1131 parent1, parent2 = repo.dirstate.parents()
1132 m1 = repo[parent1].manifest()
1132 m1 = repo[parent1].manifest()
1133 m2 = repo[parent2].manifest()
1133 m2 = repo[parent2].manifest()
1134 errors = 0
1134 errors = 0
1135 for f in repo.dirstate:
1135 for f in repo.dirstate:
1136 state = repo.dirstate[f]
1136 state = repo.dirstate[f]
1137 if state in "nr" and f not in m1:
1137 if state in "nr" and f not in m1:
1138 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1138 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1139 errors += 1
1139 errors += 1
1140 if state in "a" and f in m1:
1140 if state in "a" and f in m1:
1141 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1141 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1142 errors += 1
1142 errors += 1
1143 if state in "m" and f not in m1 and f not in m2:
1143 if state in "m" and f not in m1 and f not in m2:
1144 ui.warn(_("%s in state %s, but not in either manifest\n") %
1144 ui.warn(_("%s in state %s, but not in either manifest\n") %
1145 (f, state))
1145 (f, state))
1146 errors += 1
1146 errors += 1
1147 for f in m1:
1147 for f in m1:
1148 state = repo.dirstate[f]
1148 state = repo.dirstate[f]
1149 if state not in "nrm":
1149 if state not in "nrm":
1150 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1150 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1151 errors += 1
1151 errors += 1
1152 if errors:
1152 if errors:
1153 error = _(".hg/dirstate inconsistent with current parent's manifest")
1153 error = _(".hg/dirstate inconsistent with current parent's manifest")
1154 raise util.Abort(error)
1154 raise util.Abort(error)
1155
1155
1156 def showconfig(ui, repo, *values, **opts):
1156 def showconfig(ui, repo, *values, **opts):
1157 """show combined config settings from all hgrc files
1157 """show combined config settings from all hgrc files
1158
1158
1159 With no arguments, print names and values of all config items.
1159 With no arguments, print names and values of all config items.
1160
1160
1161 With one argument of the form section.name, print just the value
1161 With one argument of the form section.name, print just the value
1162 of that config item.
1162 of that config item.
1163
1163
1164 With multiple arguments, print names and values of all config
1164 With multiple arguments, print names and values of all config
1165 items with matching section names.
1165 items with matching section names.
1166
1166
1167 With --debug, the source (filename and line number) is printed
1167 With --debug, the source (filename and line number) is printed
1168 for each config item.
1168 for each config item.
1169
1169
1170 Returns 0 on success.
1170 Returns 0 on success.
1171 """
1171 """
1172
1172
1173 for f in scmutil.rcpath():
1173 for f in scmutil.rcpath():
1174 ui.debug(_('read config from: %s\n') % f)
1174 ui.debug(_('read config from: %s\n') % f)
1175 untrusted = bool(opts.get('untrusted'))
1175 untrusted = bool(opts.get('untrusted'))
1176 if values:
1176 if values:
1177 sections = [v for v in values if '.' not in v]
1177 sections = [v for v in values if '.' not in v]
1178 items = [v for v in values if '.' in v]
1178 items = [v for v in values if '.' in v]
1179 if len(items) > 1 or items and sections:
1179 if len(items) > 1 or items and sections:
1180 raise util.Abort(_('only one config item permitted'))
1180 raise util.Abort(_('only one config item permitted'))
1181 for section, name, value in ui.walkconfig(untrusted=untrusted):
1181 for section, name, value in ui.walkconfig(untrusted=untrusted):
1182 value = str(value).replace('\n', '\\n')
1182 value = str(value).replace('\n', '\\n')
1183 sectname = section + '.' + name
1183 sectname = section + '.' + name
1184 if values:
1184 if values:
1185 for v in values:
1185 for v in values:
1186 if v == section:
1186 if v == section:
1187 ui.debug('%s: ' %
1187 ui.debug('%s: ' %
1188 ui.configsource(section, name, untrusted))
1188 ui.configsource(section, name, untrusted))
1189 ui.write('%s=%s\n' % (sectname, value))
1189 ui.write('%s=%s\n' % (sectname, value))
1190 elif v == sectname:
1190 elif v == sectname:
1191 ui.debug('%s: ' %
1191 ui.debug('%s: ' %
1192 ui.configsource(section, name, untrusted))
1192 ui.configsource(section, name, untrusted))
1193 ui.write(value, '\n')
1193 ui.write(value, '\n')
1194 else:
1194 else:
1195 ui.debug('%s: ' %
1195 ui.debug('%s: ' %
1196 ui.configsource(section, name, untrusted))
1196 ui.configsource(section, name, untrusted))
1197 ui.write('%s=%s\n' % (sectname, value))
1197 ui.write('%s=%s\n' % (sectname, value))
1198
1198
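# For illustration only: a standalone sketch of how showconfig above
# separates its arguments into whole sections and single section.name
# items, and why mixing them is rejected.  Sample values are
# hypothetical.
def _demo_split_config_args(values):
    sections = [v for v in values if '.' not in v]
    items = [v for v in values if '.' in v]
    if len(items) > 1 or (items and sections):
        raise ValueError('only one config item permitted')
    return sections, items

if __name__ == '__main__':
    print _demo_split_config_args(['ui', 'extensions'])  # (['ui', 'extensions'], [])
    print _demo_split_config_args(['ui.username'])       # ([], ['ui.username'])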
1199 def debugknown(ui, repopath, *ids, **opts):
1199 def debugknown(ui, repopath, *ids, **opts):
1200 """test whether node ids are known to a repo
1200 """test whether node ids are known to a repo
1201
1201
1202 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1202 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1203 indicating unknown/known.
1203 indicating unknown/known.
1204 """
1204 """
1205 repo = hg.repository(ui, repopath)
1205 repo = hg.repository(ui, repopath)
1206 if not repo.capable('known'):
1206 if not repo.capable('known'):
1207 raise util.Abort("known() not supported by target repository")
1207 raise util.Abort("known() not supported by target repository")
1208 flags = repo.known([bin(s) for s in ids])
1208 flags = repo.known([bin(s) for s in ids])
1209 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1209 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1210
1210
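# For illustration only: a tiny standalone sketch of the 0/1 output
# format produced by debugknown above; the flag list is hypothetical.
def _demo_known_flags(flags):
    # one character per queried node id: "1" if known, "0" if unknown
    return "".join([f and "1" or "0" for f in flags])

if __name__ == '__main__':
    print _demo_known_flags([True, False, True])  # 101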
1211 def debugbundle(ui, bundlepath, all=None, **opts):
1211 def debugbundle(ui, bundlepath, all=None, **opts):
1212 """lists the contents of a bundle"""
1212 """lists the contents of a bundle"""
1213 f = url.open(ui, bundlepath)
1213 f = url.open(ui, bundlepath)
1214 try:
1214 try:
1215 gen = changegroup.readbundle(f, bundlepath)
1215 gen = changegroup.readbundle(f, bundlepath)
1216 if all:
1216 if all:
1217 ui.write("format: id, p1, p2, cset, len(delta)\n")
1217 ui.write("format: id, p1, p2, cset, len(delta)\n")
1218
1218
1219 def showchunks(named):
1219 def showchunks(named):
1220 ui.write("\n%s\n" % named)
1220 ui.write("\n%s\n" % named)
1221 while 1:
1221 while 1:
1222 chunkdata = gen.parsechunk()
1222 chunkdata = gen.parsechunk()
1223 if not chunkdata:
1223 if not chunkdata:
1224 break
1224 break
1225 node = chunkdata['node']
1225 node = chunkdata['node']
1226 p1 = chunkdata['p1']
1226 p1 = chunkdata['p1']
1227 p2 = chunkdata['p2']
1227 p2 = chunkdata['p2']
1228 cs = chunkdata['cs']
1228 cs = chunkdata['cs']
1229 delta = chunkdata['data']
1229 delta = chunkdata['data']
1230 ui.write("%s %s %s %s %s\n" %
1230 ui.write("%s %s %s %s %s\n" %
1231 (hex(node), hex(p1), hex(p2),
1231 (hex(node), hex(p1), hex(p2),
1232 hex(cs), len(delta)))
1232 hex(cs), len(delta)))
1233
1233
1234 showchunks("changelog")
1234 showchunks("changelog")
1235 showchunks("manifest")
1235 showchunks("manifest")
1236 while 1:
1236 while 1:
1237 fname = gen.chunk()
1237 fname = gen.chunk()
1238 if not fname:
1238 if not fname:
1239 break
1239 break
1240 showchunks(fname)
1240 showchunks(fname)
1241 else:
1241 else:
1242 while 1:
1242 while 1:
1243 chunkdata = gen.parsechunk()
1243 chunkdata = gen.parsechunk()
1244 if not chunkdata:
1244 if not chunkdata:
1245 break
1245 break
1246 node = chunkdata['node']
1246 node = chunkdata['node']
1247 ui.write("%s\n" % hex(node))
1247 ui.write("%s\n" % hex(node))
1248 finally:
1248 finally:
1249 f.close()
1249 f.close()
1250
1250
1251 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1251 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1252 """retrieves a bundle from a repo
1252 """retrieves a bundle from a repo
1253
1253
1254 Every ID must be a full-length hex node id string. Saves the bundle to the
1254 Every ID must be a full-length hex node id string. Saves the bundle to the
1255 given file.
1255 given file.
1256 """
1256 """
1257 repo = hg.repository(ui, repopath)
1257 repo = hg.repository(ui, repopath)
1258 if not repo.capable('getbundle'):
1258 if not repo.capable('getbundle'):
1259 raise util.Abort("getbundle() not supported by target repository")
1259 raise util.Abort("getbundle() not supported by target repository")
1260 args = {}
1260 args = {}
1261 if common:
1261 if common:
1262 args['common'] = [bin(s) for s in common]
1262 args['common'] = [bin(s) for s in common]
1263 if head:
1263 if head:
1264 args['heads'] = [bin(s) for s in head]
1264 args['heads'] = [bin(s) for s in head]
1265 bundle = repo.getbundle('debug', **args)
1265 bundle = repo.getbundle('debug', **args)
1266
1266
1267 bundletype = opts.get('type', 'bzip2').lower()
1267 bundletype = opts.get('type', 'bzip2').lower()
1268 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1268 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1269 bundletype = btypes.get(bundletype)
1269 bundletype = btypes.get(bundletype)
1270 if bundletype not in changegroup.bundletypes:
1270 if bundletype not in changegroup.bundletypes:
1271 raise util.Abort(_('unknown bundle type specified with --type'))
1271 raise util.Abort(_('unknown bundle type specified with --type'))
1272 changegroup.writebundle(bundle, bundlepath, bundletype)
1272 changegroup.writebundle(bundle, bundlepath, bundletype)
1273
1273
1274 def debugpushkey(ui, repopath, namespace, *keyinfo):
1274 def debugpushkey(ui, repopath, namespace, *keyinfo):
1275 '''access the pushkey key/value protocol
1275 '''access the pushkey key/value protocol
1276
1276
1277 With two args, list the keys in the given namespace.
1277 With two args, list the keys in the given namespace.
1278
1278
1279 With five args, set a key to new if it currently is set to old.
1279 With five args, set a key to new if it currently is set to old.
1280 Reports success or failure.
1280 Reports success or failure.
1281 '''
1281 '''
1282
1282
1283 target = hg.repository(ui, repopath)
1283 target = hg.repository(ui, repopath)
1284 if keyinfo:
1284 if keyinfo:
1285 key, old, new = keyinfo
1285 key, old, new = keyinfo
1286 r = target.pushkey(namespace, key, old, new)
1286 r = target.pushkey(namespace, key, old, new)
1287 ui.status(str(r) + '\n')
1287 ui.status(str(r) + '\n')
1288 return not r
1288 return not r
1289 else:
1289 else:
1290 for k, v in target.listkeys(namespace).iteritems():
1290 for k, v in target.listkeys(namespace).iteritems():
1291 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1291 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1292 v.encode('string-escape')))
1292 v.encode('string-escape')))
1293
1293
1294 def debugrevspec(ui, repo, expr):
1294 def debugrevspec(ui, repo, expr):
1295 '''parse and apply a revision specification'''
1295 '''parse and apply a revision specification'''
1296 if ui.verbose:
1296 if ui.verbose:
1297 tree = revset.parse(expr)[0]
1297 tree = revset.parse(expr)[0]
1298 ui.note(tree, "\n")
1298 ui.note(tree, "\n")
1299 func = revset.match(expr)
1299 func = revset.match(expr)
1300 for c in func(repo, range(len(repo))):
1300 for c in func(repo, range(len(repo))):
1301 ui.write("%s\n" % c)
1301 ui.write("%s\n" % c)
1302
1302
1303 def debugsetparents(ui, repo, rev1, rev2=None):
1303 def debugsetparents(ui, repo, rev1, rev2=None):
1304 """manually set the parents of the current working directory
1304 """manually set the parents of the current working directory
1305
1305
1306 This is useful for writing repository conversion tools, but should
1306 This is useful for writing repository conversion tools, but should
1307 be used with care.
1307 be used with care.
1308
1308
1309 Returns 0 on success.
1309 Returns 0 on success.
1310 """
1310 """
1311
1311
1312 r1 = cmdutil.revsingle(repo, rev1).node()
1312 r1 = cmdutil.revsingle(repo, rev1).node()
1313 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1313 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1314
1314
1315 wlock = repo.wlock()
1315 wlock = repo.wlock()
1316 try:
1316 try:
1317 repo.dirstate.setparents(r1, r2)
1317 repo.dirstate.setparents(r1, r2)
1318 finally:
1318 finally:
1319 wlock.release()
1319 wlock.release()
1320
1320
1321 def debugstate(ui, repo, nodates=None, datesort=None):
1321 def debugstate(ui, repo, nodates=None, datesort=None):
1322 """show the contents of the current dirstate"""
1322 """show the contents of the current dirstate"""
1323 timestr = ""
1323 timestr = ""
1324 showdate = not nodates
1324 showdate = not nodates
1325 if datesort:
1325 if datesort:
1326 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1326 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1327 else:
1327 else:
1328 keyfunc = None # sort by filename
1328 keyfunc = None # sort by filename
1329 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1329 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1330 if showdate:
1330 if showdate:
1331 if ent[3] == -1:
1331 if ent[3] == -1:
1332 # Pad or slice to locale representation
1332 # Pad or slice to locale representation
1333 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1333 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1334 time.localtime(0)))
1334 time.localtime(0)))
1335 timestr = 'unset'
1335 timestr = 'unset'
1336 timestr = (timestr[:locale_len] +
1336 timestr = (timestr[:locale_len] +
1337 ' ' * (locale_len - len(timestr)))
1337 ' ' * (locale_len - len(timestr)))
1338 else:
1338 else:
1339 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1339 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1340 time.localtime(ent[3]))
1340 time.localtime(ent[3]))
1341 if ent[1] & 020000:
1341 if ent[1] & 020000:
1342 mode = 'lnk'
1342 mode = 'lnk'
1343 else:
1343 else:
1344 mode = '%3o' % (ent[1] & 0777)
1344 mode = '%3o' % (ent[1] & 0777)
1345 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1345 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1346 for f in repo.dirstate.copies():
1346 for f in repo.dirstate.copies():
1347 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1347 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1348
1348
1349 def debugsub(ui, repo, rev=None):
1349 def debugsub(ui, repo, rev=None):
1350 ctx = cmdutil.revsingle(repo, rev, None)
1350 ctx = cmdutil.revsingle(repo, rev, None)
1351 for k, v in sorted(ctx.substate.items()):
1351 for k, v in sorted(ctx.substate.items()):
1352 ui.write('path %s\n' % k)
1352 ui.write('path %s\n' % k)
1353 ui.write(' source %s\n' % v[0])
1353 ui.write(' source %s\n' % v[0])
1354 ui.write(' revision %s\n' % v[1])
1354 ui.write(' revision %s\n' % v[1])
1355
1355
1356 def debugdag(ui, repo, file_=None, *revs, **opts):
1356 def debugdag(ui, repo, file_=None, *revs, **opts):
1357 """format the changelog or an index DAG as a concise textual description
1357 """format the changelog or an index DAG as a concise textual description
1358
1358
1359 If you pass a revlog index, the revlog's DAG is emitted. If you list
1359 If you pass a revlog index, the revlog's DAG is emitted. If you list
1360 revision numbers, they get labelled in the output as rN.
1360 revision numbers, they get labelled in the output as rN.
1361
1361
1362 Otherwise, the changelog DAG of the current repo is emitted.
1362 Otherwise, the changelog DAG of the current repo is emitted.
1363 """
1363 """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

def debugdata(ui, repo, file_, rev):
    """dump the contents of a data file revision"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if not r:
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))

def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    ignore = repo.dirstate._ignore
    if hasattr(ignore, 'includepat'):
        ui.write("%s\n" % ignore.includepat)
    else:
        raise util.Abort(_("no ignore patterns found"))

def debugindex(ui, repo, file_, **opts):
    """dump the contents of an index file"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog

    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    if not r:
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)

    if format == 0:
        ui.write("   rev    offset  length   base linkrev"
                 " nodeid       p1           p2\n")
    elif format == 1:
        ui.write("   rev flag   offset   length"
                 "     size   base   link     p1     p2       nodeid\n")

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))

def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    r = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if not r:
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''
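    # The checks below cover, in order: the working locale/encoding, the
    # compiled C extension modules, the default command-line template map,
    # the configured commit editor, and the configured username; each failed
    # check bumps the problem counter reported at the end.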

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems

def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = cmdutil.revsingle(repo, opts.get('rev'))
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = cmdutil.match(repo, pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.repository(hg.remoteui(ui, opts), repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::
       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = cmdutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffopts(ui, opts)
    m = cmdutil.match(repo, pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'))

def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::
       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    Returns 0 on success.
    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffopts(ui, opts))

def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    errs = 0

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is already untracked\n')
                    % m.rel(f))
            errs = 1

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo[None].remove(forget, unlink=False)
    return errs

def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [fn, str(rev)]
            before, match, after = None, None, None
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(ctx.user()))
            if opts.get('date'):
                cols.append(datefunc(ctx.date()))
            if opts.get('files_with_matches'):
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                before = l.line[:l.colstart]
                match = l.line[l.colstart:l.colend]
                after = l.line[l.colend:]
            ui.write(sep.join(cols))
            if before is not None:
                if not opts.get('text') and binary():
                    ui.write(sep + " Binary file matches")
                else:
                    ui.write(sep + before)
                    ui.write(match, label='grep.match')
                    ui.write(after)
            ui.write(eol)
            found = True
        return found

    skip = {}
    revfiles = {}
    matchfn = cmdutil.match(repo, pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]

    return not found

def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository branch heads.

    Repository "heads" are changesets with no child changesets. They are
    where development generally takes place and are the usual targets
    for update and merge operations. Branch heads are changesets that have
    no child changeset on the same branch.

    If one or more REVs are given, only branch heads on the branches
    associated with the specified changesets are shown.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    changesets without children will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    start = None
    if 'rev' in opts:
        start = cmdutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for b, ls in repo.branchmap().iteritems():
            if start is None:
                heads += [repo[h] for h in ls]
                continue
            startrev = repo.changelog.rev(start)
            descendants = set(repo.changelog.descendants(startrev))
            descendants.add(startrev)
            rev = repo.changelog.rev
            heads += [repo[h] for h in ls if rev(h) in descendants]

    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if not opts.get('closed'):
        heads = [h for h in heads if not h.extra().get('close')]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()

def help_(ui, name=None, with_version=False, unknowncmd=False, full=True):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """
    option_lists = []
    textwidth = min(ui.termwidth(), 80) - 2

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif name and not full:
                msg = _('use "hg help %s" to show the full help text') % name
            elif aliases:
                msg = _('use "hg -v help%s" to show builtin aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            helplist(_('list of commands:\n\n'), select)
            return

        # check if it's an invalid alias and display its error if it is
        if getattr(entry[0], 'badalias', False):
            if not unknowncmd:
                entry[0](ui)
            return

        # synopsis
        if len(entry) > 2:
            if entry[2].startswith('hg'):
                ui.write("%s\n" % entry[2])
            else:
                ui.write('hg %s %s\n' % (aliases[0], entry[2]))
        else:
            ui.write('hg %s\n' % aliases[0])

        # aliases
        if full and not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = gettext(entry[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if hasattr(entry[0], 'definition'): # aliased command
            if entry[0].definition.startswith('!'): # shell alias
                doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
            else:
                doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
        if ui.quiet or not full:
            doc = doc.splitlines()[0]
        keep = ui.verbose and ['verbose'] or []
        formatted, pruned = minirst.format(doc, textwidth, keep=keep)
        ui.write("\n%s\n" % formatted)
        if pruned:
            ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)

        if not ui.quiet:
            # options
            if entry[1]:
                option_lists.append((_("options:\n"), entry[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        h = {}
        cmds = {}
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write('%s\n' % (util.wrap(h[f], textwidth,
                                             initindent=' %-*s ' % (m, f),
                                             hangindent=' ' * (m + 4))))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(no help text available)")
        if hasattr(doc, '__call__'):
            doc = doc()

        ui.write("%s\n\n" % header)
        ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))

    def helpext(name):
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or _('no help text available')
        except KeyError:
            mod = None
            doc = extensions.disabledext(name)
            if not doc:
                raise error.UnknownCommand(name)

        if '\n' not in doc:
            head, tail = doc, ""
        else:
            head, tail = doc.split('\n', 1)
        ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
        if tail:
            ui.write(minirst.format(tail, textwidth))
            ui.status('\n\n')

        if mod:
            try:
                ct = mod.cmdtable
            except AttributeError:
                ct = {}
            modcmds = set([c.split('|', 1)[0] for c in ct])
            helplist(_('list of commands:\n\n'), modcmds.__contains__)
        else:
            ui.write(_('use "hg help extensions" for information on enabling '
                       'extensions\n'))

    def helpextcmd(name):
        cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
        doc = gettext(mod.__doc__).splitlines()[0]

        msg = help.listexts(_("'%s' is provided by the following "
                              "extension:") % cmd, {ext: doc}, len(ext),
                            indent=4)
        ui.write(minirst.format(msg, textwidth))
        ui.write('\n\n')
        ui.write(_('use "hg help extensions" for information on enabling '
                   'extensions\n'))

    help.addtopichook('revsets', revset.makedoc)
    help.addtopichook('templates', templatekw.makedoc)
    help.addtopichook('templates', templatefilters.makedoc)

    if name and name != 'shortlist':
        i = None
        if unknowncmd:
            queries = (helpextcmd,)
        else:
            queries = (helptopic, helpcmd, helpext, helpextcmd)
        for f in queries:
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i

2233 else:
2233 else:
2234 # program name
2234 # program name
2235 if ui.verbose or with_version:
2235 if ui.verbose or with_version:
2236 version_(ui)
2236 version_(ui)
2237 else:
2237 else:
2238 ui.status(_("Mercurial Distributed SCM\n"))
2238 ui.status(_("Mercurial Distributed SCM\n"))
2239 ui.status('\n')
2239 ui.status('\n')
2240
2240
2241 # list of commands
2241 # list of commands
2242 if name == "shortlist":
2242 if name == "shortlist":
2243 header = _('basic commands:\n\n')
2243 header = _('basic commands:\n\n')
2244 else:
2244 else:
2245 header = _('list of commands:\n\n')
2245 header = _('list of commands:\n\n')
2246
2246
2247 helplist(header)
2247 helplist(header)
2248 if name != 'shortlist':
2248 if name != 'shortlist':
2249 exts, maxlength = extensions.enabled()
2249 exts, maxlength = extensions.enabled()
2250 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2250 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2251 if text:
2251 if text:
2252 ui.write("\n%s\n" % minirst.format(text, textwidth))
2252 ui.write("\n%s\n" % minirst.format(text, textwidth))
2253
2253
2254 # list all option lists
2254 # list all option lists
2255 opt_output = []
2255 opt_output = []
2256 multioccur = False
2256 multioccur = False
2257 for title, options in option_lists:
2257 for title, options in option_lists:
2258 opt_output.append(("\n%s" % title, None))
2258 opt_output.append(("\n%s" % title, None))
2259 for option in options:
2259 for option in options:
2260 if len(option) == 5:
2260 if len(option) == 5:
2261 shortopt, longopt, default, desc, optlabel = option
2261 shortopt, longopt, default, desc, optlabel = option
2262 else:
2262 else:
2263 shortopt, longopt, default, desc = option
2263 shortopt, longopt, default, desc = option
2264 optlabel = _("VALUE") # default label
2264 optlabel = _("VALUE") # default label
2265
2265
2266 if _("DEPRECATED") in desc and not ui.verbose:
2266 if _("DEPRECATED") in desc and not ui.verbose:
2267 continue
2267 continue
2268 if isinstance(default, list):
2268 if isinstance(default, list):
2269 numqualifier = " %s [+]" % optlabel
2269 numqualifier = " %s [+]" % optlabel
2270 multioccur = True
2270 multioccur = True
2271 elif (default is not None) and not isinstance(default, bool):
2271 elif (default is not None) and not isinstance(default, bool):
2272 numqualifier = " %s" % optlabel
2272 numqualifier = " %s" % optlabel
2273 else:
2273 else:
2274 numqualifier = ""
2274 numqualifier = ""
2275 opt_output.append(("%2s%s" %
2275 opt_output.append(("%2s%s" %
2276 (shortopt and "-%s" % shortopt,
2276 (shortopt and "-%s" % shortopt,
2277 longopt and " --%s%s" %
2277 longopt and " --%s%s" %
2278 (longopt, numqualifier)),
2278 (longopt, numqualifier)),
2279 "%s%s" % (desc,
2279 "%s%s" % (desc,
2280 default
2280 default
2281 and _(" (default: %s)") % default
2281 and _(" (default: %s)") % default
2282 or "")))
2282 or "")))
2283 if multioccur:
2283 if multioccur:
2284 msg = _("\n[+] marked option can be specified multiple times")
2284 msg = _("\n[+] marked option can be specified multiple times")
2285 if ui.verbose and name != 'shortlist':
2285 if ui.verbose and name != 'shortlist':
2286 opt_output.append((msg, None))
2286 opt_output.append((msg, None))
2287 else:
2287 else:
2288 opt_output.insert(-1, (msg, None))
2288 opt_output.insert(-1, (msg, None))
2289
2289
2290 if not name:
2290 if not name:
2291 ui.write(_("\nadditional help topics:\n\n"))
2291 ui.write(_("\nadditional help topics:\n\n"))
2292 topics = []
2292 topics = []
2293 for names, header, doc in help.helptable:
2293 for names, header, doc in help.helptable:
2294 topics.append((sorted(names, key=len, reverse=True)[0], header))
2294 topics.append((sorted(names, key=len, reverse=True)[0], header))
2295 topics_len = max([len(s[0]) for s in topics])
2295 topics_len = max([len(s[0]) for s in topics])
2296 for t, desc in topics:
2296 for t, desc in topics:
2297 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2297 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2298
2298
2299 if opt_output:
2299 if opt_output:
2300 colwidth = encoding.colwidth
2300 colwidth = encoding.colwidth
2301 # normalize: (opt or message, desc or None, width of opt)
2301 # normalize: (opt or message, desc or None, width of opt)
2302 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2302 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2303 for opt, desc in opt_output]
2303 for opt, desc in opt_output]
2304 hanging = max([e[2] for e in entries])
2304 hanging = max([e[2] for e in entries])
2305 for opt, desc, width in entries:
2305 for opt, desc, width in entries:
2306 if desc:
2306 if desc:
2307 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2307 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2308 hangindent = ' ' * (hanging + 3)
2308 hangindent = ' ' * (hanging + 3)
2309 ui.write('%s\n' % (util.wrap(desc, textwidth,
2309 ui.write('%s\n' % (util.wrap(desc, textwidth,
2310 initindent=initindent,
2310 initindent=initindent,
2311 hangindent=hangindent)))
2311 hangindent=hangindent)))
2312 else:
2312 else:
2313 ui.write("%s\n" % opt)
2313 ui.write("%s\n" % opt)
2314
2314
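# Illustrative rendering sketch (hypothetical option, not part of the original
# source): each (opt, desc) pair collected in opt_output above is wrapped with
# util.wrap() so continuation lines hang under the description column, e.g.
#
#    -u --user VALUE    record the specified user as committer; wrapped
#                       continuation lines are indented by hanging + 3 spaces
#
# where "hanging" is the widest option string among the entries.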
2315 def identify(ui, repo, source=None, rev=None,
2315 def identify(ui, repo, source=None, rev=None,
2316 num=None, id=None, branch=None, tags=None, bookmarks=None):
2316 num=None, id=None, branch=None, tags=None, bookmarks=None):
2317 """identify the working copy or specified revision
2317 """identify the working copy or specified revision
2318
2318
2319 Print a summary identifying the repository state at REV using one or
2319 Print a summary identifying the repository state at REV using one or
2320 two parent hash identifiers, followed by a "+" if the working
2320 two parent hash identifiers, followed by a "+" if the working
2321 directory has uncommitted changes, the branch name (if not default),
2321 directory has uncommitted changes, the branch name (if not default),
2322 a list of tags, and a list of bookmarks.
2322 a list of tags, and a list of bookmarks.
2323
2323
2324 When REV is not given, print a summary of the current state of the
2324 When REV is not given, print a summary of the current state of the
2325 repository.
2325 repository.
2326
2326
2327 Specifying a path to a repository root or Mercurial bundle will
2327 Specifying a path to a repository root or Mercurial bundle will
2328 cause lookup to operate on that repository/bundle.
2328 cause lookup to operate on that repository/bundle.
2329
2329
2330 Returns 0 if successful.
2330 Returns 0 if successful.
2331 """
2331 """
2332
2332
2333 if not repo and not source:
2333 if not repo and not source:
2334 raise util.Abort(_("there is no Mercurial repository here "
2334 raise util.Abort(_("there is no Mercurial repository here "
2335 "(.hg not found)"))
2335 "(.hg not found)"))
2336
2336
2337 hexfunc = ui.debugflag and hex or short
2337 hexfunc = ui.debugflag and hex or short
2338 default = not (num or id or branch or tags or bookmarks)
2338 default = not (num or id or branch or tags or bookmarks)
2339 output = []
2339 output = []
2340 revs = []
2340 revs = []
2341
2341
2342 if source:
2342 if source:
2343 source, branches = hg.parseurl(ui.expandpath(source))
2343 source, branches = hg.parseurl(ui.expandpath(source))
2344 repo = hg.repository(ui, source)
2344 repo = hg.repository(ui, source)
2345 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2345 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2346
2346
2347 if not repo.local():
2347 if not repo.local():
2348 if num or branch or tags:
2348 if num or branch or tags:
2349 raise util.Abort(
2349 raise util.Abort(
2350 _("can't query remote revision number, branch, or tags"))
2350 _("can't query remote revision number, branch, or tags"))
2351 if not rev and revs:
2351 if not rev and revs:
2352 rev = revs[0]
2352 rev = revs[0]
2353 if not rev:
2353 if not rev:
2354 rev = "tip"
2354 rev = "tip"
2355
2355
2356 remoterev = repo.lookup(rev)
2356 remoterev = repo.lookup(rev)
2357 if default or id:
2357 if default or id:
2358 output = [hexfunc(remoterev)]
2358 output = [hexfunc(remoterev)]
2359
2359
2360 def getbms():
2360 def getbms():
2361 bms = []
2361 bms = []
2362
2362
2363 if 'bookmarks' in repo.listkeys('namespaces'):
2363 if 'bookmarks' in repo.listkeys('namespaces'):
2364 hexremoterev = hex(remoterev)
2364 hexremoterev = hex(remoterev)
2365 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2365 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2366 if bmr == hexremoterev]
2366 if bmr == hexremoterev]
2367
2367
2368 return bms
2368 return bms
2369
2369
2370 if bookmarks:
2370 if bookmarks:
2371 output.extend(getbms())
2371 output.extend(getbms())
2372 elif default and not ui.quiet:
2372 elif default and not ui.quiet:
2373 # multiple bookmarks for a single parent separated by '/'
2373 # multiple bookmarks for a single parent separated by '/'
2374 bm = '/'.join(getbms())
2374 bm = '/'.join(getbms())
2375 if bm:
2375 if bm:
2376 output.append(bm)
2376 output.append(bm)
2377 else:
2377 else:
2378 if not rev:
2378 if not rev:
2379 ctx = repo[None]
2379 ctx = repo[None]
2380 parents = ctx.parents()
2380 parents = ctx.parents()
2381 changed = ""
2381 changed = ""
2382 if default or id or num:
2382 if default or id or num:
2383 changed = util.any(repo.status()) and "+" or ""
2383 changed = util.any(repo.status()) and "+" or ""
2384 if default or id:
2384 if default or id:
2385 output = ["%s%s" %
2385 output = ["%s%s" %
2386 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2386 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2387 if num:
2387 if num:
2388 output.append("%s%s" %
2388 output.append("%s%s" %
2389 ('+'.join([str(p.rev()) for p in parents]), changed))
2389 ('+'.join([str(p.rev()) for p in parents]), changed))
2390 else:
2390 else:
2391 ctx = cmdutil.revsingle(repo, rev)
2391 ctx = cmdutil.revsingle(repo, rev)
2392 if default or id:
2392 if default or id:
2393 output = [hexfunc(ctx.node())]
2393 output = [hexfunc(ctx.node())]
2394 if num:
2394 if num:
2395 output.append(str(ctx.rev()))
2395 output.append(str(ctx.rev()))
2396
2396
2397 if default and not ui.quiet:
2397 if default and not ui.quiet:
2398 b = ctx.branch()
2398 b = ctx.branch()
2399 if b != 'default':
2399 if b != 'default':
2400 output.append("(%s)" % b)
2400 output.append("(%s)" % b)
2401
2401
2402 # multiple tags for a single parent separated by '/'
2402 # multiple tags for a single parent separated by '/'
2403 t = '/'.join(ctx.tags())
2403 t = '/'.join(ctx.tags())
2404 if t:
2404 if t:
2405 output.append(t)
2405 output.append(t)
2406
2406
2407 # multiple bookmarks for a single parent separated by '/'
2407 # multiple bookmarks for a single parent separated by '/'
2408 bm = '/'.join(ctx.bookmarks())
2408 bm = '/'.join(ctx.bookmarks())
2409 if bm:
2409 if bm:
2410 output.append(bm)
2410 output.append(bm)
2411 else:
2411 else:
2412 if branch:
2412 if branch:
2413 output.append(ctx.branch())
2413 output.append(ctx.branch())
2414
2414
2415 if tags:
2415 if tags:
2416 output.extend(ctx.tags())
2416 output.extend(ctx.tags())
2417
2417
2418 if bookmarks:
2418 if bookmarks:
2419 output.extend(ctx.bookmarks())
2419 output.extend(ctx.bookmarks())
2420
2420
2421 ui.write("%s\n" % ' '.join(output))
2421 ui.write("%s\n" % ' '.join(output))
2422
2422
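# Illustrative output (hypothetical hashes and names): for a working copy with
# uncommitted changes on a named branch carrying a tag and a bookmark,
# ``hg identify`` prints something along the lines of
#
#   $ hg identify
#   c3f5aa9e1c2b+ (stable) tip feature-x
#
# The trailing "+" marks local modifications; the branch is shown in
# parentheses only when it is not 'default'.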
2423 def import_(ui, repo, patch1, *patches, **opts):
2423 def import_(ui, repo, patch1, *patches, **opts):
2424 """import an ordered set of patches
2424 """import an ordered set of patches
2425
2425
2426 Import a list of patches and commit them individually (unless
2426 Import a list of patches and commit them individually (unless
2427 --no-commit is specified).
2427 --no-commit is specified).
2428
2428
2429 If there are outstanding changes in the working directory, import
2429 If there are outstanding changes in the working directory, import
2430 will abort unless given the -f/--force flag.
2430 will abort unless given the -f/--force flag.
2431
2431
2432 You can import a patch straight from a mail message. Even patches
2432 You can import a patch straight from a mail message. Even patches
2433 as attachments work (to use the body part, it must have type
2433 as attachments work (to use the body part, it must have type
2434 text/plain or text/x-patch). From and Subject headers of the email
2434 text/plain or text/x-patch). From and Subject headers of the email
2435 message are used as the default committer and commit message. All
2435 message are used as the default committer and commit message. All
2436 text/plain body parts before the first diff are added to the commit
2436 text/plain body parts before the first diff are added to the commit
2437 message.
2437 message.
2438
2438
2439 If the imported patch was generated by :hg:`export`, user and
2439 If the imported patch was generated by :hg:`export`, user and
2440 description from patch override values from message headers and
2440 description from patch override values from message headers and
2441 body. Values given on command line with -m/--message and -u/--user
2441 body. Values given on command line with -m/--message and -u/--user
2442 override these.
2442 override these.
2443
2443
2444 If --exact is specified, import will set the working directory to
2444 If --exact is specified, import will set the working directory to
2445 the parent of each patch before applying it, and will abort if the
2445 the parent of each patch before applying it, and will abort if the
2446 resulting changeset has a different ID than the one recorded in
2446 resulting changeset has a different ID than the one recorded in
2447 the patch. This may happen due to character set problems or other
2447 the patch. This may happen due to character set problems or other
2448 deficiencies in the text patch format.
2448 deficiencies in the text patch format.
2449
2449
2450 With -s/--similarity, hg will attempt to discover renames and
2450 With -s/--similarity, hg will attempt to discover renames and
2451 copies in the patch in the same way as 'addremove'.
2451 copies in the patch in the same way as 'addremove'.
2452
2452
2453 To read a patch from standard input, use "-" as the patch name. If
2453 To read a patch from standard input, use "-" as the patch name. If
2454 a URL is specified, the patch will be downloaded from it.
2454 a URL is specified, the patch will be downloaded from it.
2455 See :hg:`help dates` for a list of formats valid for -d/--date.
2455 See :hg:`help dates` for a list of formats valid for -d/--date.
2456
2456
2457 Returns 0 on success.
2457 Returns 0 on success.
2458 """
2458 """
2459 patches = (patch1,) + patches
2459 patches = (patch1,) + patches
2460
2460
2461 date = opts.get('date')
2461 date = opts.get('date')
2462 if date:
2462 if date:
2463 opts['date'] = util.parsedate(date)
2463 opts['date'] = util.parsedate(date)
2464
2464
2465 try:
2465 try:
2466 sim = float(opts.get('similarity') or 0)
2466 sim = float(opts.get('similarity') or 0)
2467 except ValueError:
2467 except ValueError:
2468 raise util.Abort(_('similarity must be a number'))
2468 raise util.Abort(_('similarity must be a number'))
2469 if sim < 0 or sim > 100:
2469 if sim < 0 or sim > 100:
2470 raise util.Abort(_('similarity must be between 0 and 100'))
2470 raise util.Abort(_('similarity must be between 0 and 100'))
2471
2471
2472 if opts.get('exact') or not opts.get('force'):
2472 if opts.get('exact') or not opts.get('force'):
2473 cmdutil.bail_if_changed(repo)
2473 cmdutil.bail_if_changed(repo)
2474
2474
2475 d = opts["base"]
2475 d = opts["base"]
2476 strip = opts["strip"]
2476 strip = opts["strip"]
2477 wlock = lock = None
2477 wlock = lock = None
2478 msgs = []
2478 msgs = []
2479
2479
2480 def tryone(ui, hunk):
2480 def tryone(ui, hunk):
2481 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2481 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2482 patch.extract(ui, hunk)
2482 patch.extract(ui, hunk)
2483
2483
2484 if not tmpname:
2484 if not tmpname:
2485 return None
2485 return None
2486 commitid = _('to working directory')
2486 commitid = _('to working directory')
2487
2487
2488 try:
2488 try:
2489 cmdline_message = cmdutil.logmessage(opts)
2489 cmdline_message = cmdutil.logmessage(opts)
2490 if cmdline_message:
2490 if cmdline_message:
2491 # pickup the cmdline msg
2491 # pickup the cmdline msg
2492 message = cmdline_message
2492 message = cmdline_message
2493 elif message:
2493 elif message:
2494 # pickup the patch msg
2494 # pickup the patch msg
2495 message = message.strip()
2495 message = message.strip()
2496 else:
2496 else:
2497 # launch the editor
2497 # launch the editor
2498 message = None
2498 message = None
2499 ui.debug('message:\n%s\n' % message)
2499 ui.debug('message:\n%s\n' % message)
2500
2500
2501 wp = repo.parents()
2501 wp = repo.parents()
2502 if opts.get('exact'):
2502 if opts.get('exact'):
2503 if not nodeid or not p1:
2503 if not nodeid or not p1:
2504 raise util.Abort(_('not a Mercurial patch'))
2504 raise util.Abort(_('not a Mercurial patch'))
2505 p1 = repo.lookup(p1)
2505 p1 = repo.lookup(p1)
2506 p2 = repo.lookup(p2 or hex(nullid))
2506 p2 = repo.lookup(p2 or hex(nullid))
2507
2507
2508 if p1 != wp[0].node():
2508 if p1 != wp[0].node():
2509 hg.clean(repo, p1)
2509 hg.clean(repo, p1)
2510 repo.dirstate.setparents(p1, p2)
2510 repo.dirstate.setparents(p1, p2)
2511 elif p2:
2511 elif p2:
2512 try:
2512 try:
2513 p1 = repo.lookup(p1)
2513 p1 = repo.lookup(p1)
2514 p2 = repo.lookup(p2)
2514 p2 = repo.lookup(p2)
2515 if p1 == wp[0].node():
2515 if p1 == wp[0].node():
2516 repo.dirstate.setparents(p1, p2)
2516 repo.dirstate.setparents(p1, p2)
2517 except error.RepoError:
2517 except error.RepoError:
2518 pass
2518 pass
2519 if opts.get('exact') or opts.get('import_branch'):
2519 if opts.get('exact') or opts.get('import_branch'):
2520 repo.dirstate.setbranch(branch or 'default')
2520 repo.dirstate.setbranch(branch or 'default')
2521
2521
2522 files = {}
2522 files = {}
2523 try:
2523 try:
2524 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2524 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2525 files=files, eolmode=None)
2525 files=files, eolmode=None)
2526 finally:
2526 finally:
2527 files = cmdutil.updatedir(ui, repo, files,
2527 files = cmdutil.updatedir(ui, repo, files,
2528 similarity=sim / 100.0)
2528 similarity=sim / 100.0)
2529 if opts.get('no_commit'):
2529 if opts.get('no_commit'):
2530 if message:
2530 if message:
2531 msgs.append(message)
2531 msgs.append(message)
2532 else:
2532 else:
2533 if opts.get('exact'):
2533 if opts.get('exact'):
2534 m = None
2534 m = None
2535 else:
2535 else:
2536 m = cmdutil.matchfiles(repo, files or [])
2536 m = cmdutil.matchfiles(repo, files or [])
2537 n = repo.commit(message, opts.get('user') or user,
2537 n = repo.commit(message, opts.get('user') or user,
2538 opts.get('date') or date, match=m,
2538 opts.get('date') or date, match=m,
2539 editor=cmdutil.commiteditor)
2539 editor=cmdutil.commiteditor)
2540 if opts.get('exact'):
2540 if opts.get('exact'):
2541 if hex(n) != nodeid:
2541 if hex(n) != nodeid:
2542 repo.rollback()
2542 repo.rollback()
2543 raise util.Abort(_('patch is damaged'
2543 raise util.Abort(_('patch is damaged'
2544 ' or loses information'))
2544 ' or loses information'))
2545 # Force a dirstate write so that the next transaction
2545 # Force a dirstate write so that the next transaction
2546 # backs up an up-to-date file.
2546 # backs up an up-to-date file.
2547 repo.dirstate.write()
2547 repo.dirstate.write()
2548 if n:
2548 if n:
2549 commitid = short(n)
2549 commitid = short(n)
2550
2550
2551 return commitid
2551 return commitid
2552 finally:
2552 finally:
2553 os.unlink(tmpname)
2553 os.unlink(tmpname)
2554
2554
2555 try:
2555 try:
2556 wlock = repo.wlock()
2556 wlock = repo.wlock()
2557 lock = repo.lock()
2557 lock = repo.lock()
2558 lastcommit = None
2558 lastcommit = None
2559 for p in patches:
2559 for p in patches:
2560 pf = os.path.join(d, p)
2560 pf = os.path.join(d, p)
2561
2561
2562 if pf == '-':
2562 if pf == '-':
2563 ui.status(_("applying patch from stdin\n"))
2563 ui.status(_("applying patch from stdin\n"))
2564 pf = sys.stdin
2564 pf = sys.stdin
2565 else:
2565 else:
2566 ui.status(_("applying %s\n") % p)
2566 ui.status(_("applying %s\n") % p)
2567 pf = url.open(ui, pf)
2567 pf = url.open(ui, pf)
2568
2568
2569 haspatch = False
2569 haspatch = False
2570 for hunk in patch.split(pf):
2570 for hunk in patch.split(pf):
2571 commitid = tryone(ui, hunk)
2571 commitid = tryone(ui, hunk)
2572 if commitid:
2572 if commitid:
2573 haspatch = True
2573 haspatch = True
2574 if lastcommit:
2574 if lastcommit:
2575 ui.status(_('applied %s\n') % lastcommit)
2575 ui.status(_('applied %s\n') % lastcommit)
2576 lastcommit = commitid
2576 lastcommit = commitid
2577
2577
2578 if not haspatch:
2578 if not haspatch:
2579 raise util.Abort(_('no diffs found'))
2579 raise util.Abort(_('no diffs found'))
2580
2580
2581 if msgs:
2581 if msgs:
2582 repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
2582 repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
2583 finally:
2583 finally:
2584 release(lock, wlock)
2584 release(lock, wlock)
2585
2585
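# Illustrative usage (hypothetical patch path, not from the original source):
# importing a patch with rename/copy detection enabled, as described in the
# docstring's -s/--similarity note:
#
#   $ hg import --similarity 90 ../patches/fix-encoding.patch
#   applying ../patches/fix-encoding.patch
#
# Use "-" as the patch name to read from standard input.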
2586 def incoming(ui, repo, source="default", **opts):
2586 def incoming(ui, repo, source="default", **opts):
2587 """show new changesets found in source
2587 """show new changesets found in source
2588
2588
2589 Show new changesets found in the specified path/URL or the default
2589 Show new changesets found in the specified path/URL or the default
2590 pull location. These are the changesets that would have been pulled
2590 pull location. These are the changesets that would have been pulled
2591 if a pull had been requested at the time you issued this command.
2591 if a pull had been requested at the time you issued this command.
2592
2592
2593 For a remote repository, using --bundle avoids downloading the
2593 For a remote repository, using --bundle avoids downloading the
2594 changesets twice if the incoming is followed by a pull.
2594 changesets twice if the incoming is followed by a pull.
2595
2595
2596 See pull for valid source format details.
2596 See pull for valid source format details.
2597
2597
2598 Returns 0 if there are incoming changes, 1 otherwise.
2598 Returns 0 if there are incoming changes, 1 otherwise.
2599 """
2599 """
2600 if opts.get('bundle') and opts.get('subrepos'):
2600 if opts.get('bundle') and opts.get('subrepos'):
2601 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2601 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2602
2602
2603 if opts.get('bookmarks'):
2603 if opts.get('bookmarks'):
2604 source, branches = hg.parseurl(ui.expandpath(source),
2604 source, branches = hg.parseurl(ui.expandpath(source),
2605 opts.get('branch'))
2605 opts.get('branch'))
2606 other = hg.repository(hg.remoteui(repo, opts), source)
2606 other = hg.repository(hg.remoteui(repo, opts), source)
2607 if 'bookmarks' not in other.listkeys('namespaces'):
2607 if 'bookmarks' not in other.listkeys('namespaces'):
2608 ui.warn(_("remote doesn't support bookmarks\n"))
2608 ui.warn(_("remote doesn't support bookmarks\n"))
2609 return 0
2609 return 0
2610 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2610 ui.status(_('comparing with %s\n') % util.hidepassword(source))
2611 return bookmarks.diff(ui, repo, other)
2611 return bookmarks.diff(ui, repo, other)
2612
2612
2613 ret = hg.incoming(ui, repo, source, opts)
2613 ret = hg.incoming(ui, repo, source, opts)
2614 return ret
2614 return ret
2615
2615
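# Illustrative usage (example URL): saving the incoming changesets to a bundle
# so that a following pull does not download them again; note that --bundle
# cannot be combined with --subrepos (see the check above):
#
#   $ hg incoming --bundle incoming.hg https://hg.example.com/repo
#   $ hg pull incoming.hg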
2616 def init(ui, dest=".", **opts):
2616 def init(ui, dest=".", **opts):
2617 """create a new repository in the given directory
2617 """create a new repository in the given directory
2618
2618
2619 Initialize a new repository in the given directory. If the given
2619 Initialize a new repository in the given directory. If the given
2620 directory does not exist, it will be created.
2620 directory does not exist, it will be created.
2621
2621
2622 If no directory is given, the current directory is used.
2622 If no directory is given, the current directory is used.
2623
2623
2624 It is possible to specify an ``ssh://`` URL as the destination.
2624 It is possible to specify an ``ssh://`` URL as the destination.
2625 See :hg:`help urls` for more information.
2625 See :hg:`help urls` for more information.
2626
2626
2627 Returns 0 on success.
2627 Returns 0 on success.
2628 """
2628 """
2629 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2629 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2630
2630
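# Illustrative usage (example host name): creating a repository locally and on
# a remote ssh host, as the docstring describes:
#
#   $ hg init project
#   $ hg init ssh://hg@hg.example.com/project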
2631 def locate(ui, repo, *pats, **opts):
2631 def locate(ui, repo, *pats, **opts):
2632 """locate files matching specific patterns
2632 """locate files matching specific patterns
2633
2633
2634 Print files under Mercurial control in the working directory whose
2634 Print files under Mercurial control in the working directory whose
2635 names match the given patterns.
2635 names match the given patterns.
2636
2636
2637 By default, this command searches all directories in the working
2637 By default, this command searches all directories in the working
2638 directory. To search just the current directory and its
2638 directory. To search just the current directory and its
2639 subdirectories, use "--include .".
2639 subdirectories, use "--include .".
2640
2640
2641 If no patterns are given to match, this command prints the names
2641 If no patterns are given to match, this command prints the names
2642 of all files under Mercurial control in the working directory.
2642 of all files under Mercurial control in the working directory.
2643
2643
2644 If you want to feed the output of this command into the "xargs"
2644 If you want to feed the output of this command into the "xargs"
2645 command, use the -0 option to both this command and "xargs". This
2645 command, use the -0 option to both this command and "xargs". This
2646 will avoid the problem of "xargs" treating single filenames that
2646 will avoid the problem of "xargs" treating single filenames that
2647 contain whitespace as multiple filenames.
2647 contain whitespace as multiple filenames.
2648
2648
2649 Returns 0 if a match is found, 1 otherwise.
2649 Returns 0 if a match is found, 1 otherwise.
2650 """
2650 """
2651 end = opts.get('print0') and '\0' or '\n'
2651 end = opts.get('print0') and '\0' or '\n'
2652 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2652 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2653
2653
2654 ret = 1
2654 ret = 1
2655 m = cmdutil.match(repo, pats, opts, default='relglob')
2655 m = cmdutil.match(repo, pats, opts, default='relglob')
2656 m.bad = lambda x, y: False
2656 m.bad = lambda x, y: False
2657 for abs in repo[rev].walk(m):
2657 for abs in repo[rev].walk(m):
2658 if not rev and abs not in repo.dirstate:
2658 if not rev and abs not in repo.dirstate:
2659 continue
2659 continue
2660 if opts.get('fullpath'):
2660 if opts.get('fullpath'):
2661 ui.write(repo.wjoin(abs), end)
2661 ui.write(repo.wjoin(abs), end)
2662 else:
2662 else:
2663 ui.write(((pats and m.rel(abs)) or abs), end)
2663 ui.write(((pats and m.rel(abs)) or abs), end)
2664 ret = 0
2664 ret = 0
2665
2665
2666 return ret
2666 return ret
2667
2667
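# Illustrative usage: null-separated output for safe piping into xargs, as
# suggested in the docstring (filenames containing whitespace stay intact):
#
#   $ hg locate -0 | xargs -0 wc -l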
2668 def log(ui, repo, *pats, **opts):
2668 def log(ui, repo, *pats, **opts):
2669 """show revision history of entire repository or files
2669 """show revision history of entire repository or files
2670
2670
2671 Print the revision history of the specified files or the entire
2671 Print the revision history of the specified files or the entire
2672 project.
2672 project.
2673
2673
2674 File history is shown without following rename or copy history of
2674 File history is shown without following rename or copy history of
2675 files. Use -f/--follow with a filename to follow history across
2675 files. Use -f/--follow with a filename to follow history across
2676 renames and copies. --follow without a filename will only show
2676 renames and copies. --follow without a filename will only show
2677 ancestors or descendants of the starting revision. --follow-first
2677 ancestors or descendants of the starting revision. --follow-first
2678 only follows the first parent of merge revisions.
2678 only follows the first parent of merge revisions.
2679
2679
2680 If no revision range is specified, the default is ``tip:0`` unless
2680 If no revision range is specified, the default is ``tip:0`` unless
2681 --follow is set, in which case the working directory parent is
2681 --follow is set, in which case the working directory parent is
2682 used as the starting revision. You can specify a revision set for
2682 used as the starting revision. You can specify a revision set for
2683 log, see :hg:`help revsets` for more information.
2683 log, see :hg:`help revsets` for more information.
2684
2684
2685 See :hg:`help dates` for a list of formats valid for -d/--date.
2685 See :hg:`help dates` for a list of formats valid for -d/--date.
2686
2686
2687 By default this command prints revision number and changeset id,
2687 By default this command prints revision number and changeset id,
2688 tags, non-trivial parents, user, date and time, and a summary for
2688 tags, non-trivial parents, user, date and time, and a summary for
2689 each commit. When the -v/--verbose switch is used, the list of
2689 each commit. When the -v/--verbose switch is used, the list of
2690 changed files and full commit message are shown.
2690 changed files and full commit message are shown.
2691
2691
2692 .. note::
2692 .. note::
2693 log -p/--patch may generate unexpected diff output for merge
2693 log -p/--patch may generate unexpected diff output for merge
2694 changesets, as it will only compare the merge changeset against
2694 changesets, as it will only compare the merge changeset against
2695 its first parent. Also, only files different from BOTH parents
2695 its first parent. Also, only files different from BOTH parents
2696 will appear in files:.
2696 will appear in files:.
2697
2697
2698 Returns 0 on success.
2698 Returns 0 on success.
2699 """
2699 """
2700
2700
2701 matchfn = cmdutil.match(repo, pats, opts)
2701 matchfn = cmdutil.match(repo, pats, opts)
2702 limit = cmdutil.loglimit(opts)
2702 limit = cmdutil.loglimit(opts)
2703 count = 0
2703 count = 0
2704
2704
2705 endrev = None
2705 endrev = None
2706 if opts.get('copies') and opts.get('rev'):
2706 if opts.get('copies') and opts.get('rev'):
2707 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2707 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2708
2708
2709 df = False
2709 df = False
2710 if opts["date"]:
2710 if opts["date"]:
2711 df = util.matchdate(opts["date"])
2711 df = util.matchdate(opts["date"])
2712
2712
2713 branches = opts.get('branch', []) + opts.get('only_branch', [])
2713 branches = opts.get('branch', []) + opts.get('only_branch', [])
2714 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2714 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2715
2715
2716 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2716 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2717 def prep(ctx, fns):
2717 def prep(ctx, fns):
2718 rev = ctx.rev()
2718 rev = ctx.rev()
2719 parents = [p for p in repo.changelog.parentrevs(rev)
2719 parents = [p for p in repo.changelog.parentrevs(rev)
2720 if p != nullrev]
2720 if p != nullrev]
2721 if opts.get('no_merges') and len(parents) == 2:
2721 if opts.get('no_merges') and len(parents) == 2:
2722 return
2722 return
2723 if opts.get('only_merges') and len(parents) != 2:
2723 if opts.get('only_merges') and len(parents) != 2:
2724 return
2724 return
2725 if opts.get('branch') and ctx.branch() not in opts['branch']:
2725 if opts.get('branch') and ctx.branch() not in opts['branch']:
2726 return
2726 return
2727 if df and not df(ctx.date()[0]):
2727 if df and not df(ctx.date()[0]):
2728 return
2728 return
2729 if opts['user'] and not [k for k in opts['user']
2729 if opts['user'] and not [k for k in opts['user']
2730 if k.lower() in ctx.user().lower()]:
2730 if k.lower() in ctx.user().lower()]:
2731 return
2731 return
2732 if opts.get('keyword'):
2732 if opts.get('keyword'):
2733 for k in [kw.lower() for kw in opts['keyword']]:
2733 for k in [kw.lower() for kw in opts['keyword']]:
2734 if (k in ctx.user().lower() or
2734 if (k in ctx.user().lower() or
2735 k in ctx.description().lower() or
2735 k in ctx.description().lower() or
2736 k in " ".join(ctx.files()).lower()):
2736 k in " ".join(ctx.files()).lower()):
2737 break
2737 break
2738 else:
2738 else:
2739 return
2739 return
2740
2740
2741 copies = None
2741 copies = None
2742 if opts.get('copies') and rev:
2742 if opts.get('copies') and rev:
2743 copies = []
2743 copies = []
2744 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2744 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2745 for fn in ctx.files():
2745 for fn in ctx.files():
2746 rename = getrenamed(fn, rev)
2746 rename = getrenamed(fn, rev)
2747 if rename:
2747 if rename:
2748 copies.append((fn, rename[0]))
2748 copies.append((fn, rename[0]))
2749
2749
2750 revmatchfn = None
2750 revmatchfn = None
2751 if opts.get('patch') or opts.get('stat'):
2751 if opts.get('patch') or opts.get('stat'):
2752 if opts.get('follow') or opts.get('follow_first'):
2752 if opts.get('follow') or opts.get('follow_first'):
2753 # note: this might be wrong when following through merges
2753 # note: this might be wrong when following through merges
2754 revmatchfn = cmdutil.match(repo, fns, default='path')
2754 revmatchfn = cmdutil.match(repo, fns, default='path')
2755 else:
2755 else:
2756 revmatchfn = matchfn
2756 revmatchfn = matchfn
2757
2757
2758 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2758 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2759
2759
2760 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2760 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2761 if count == limit:
2761 if count == limit:
2762 break
2762 break
2763 if displayer.flush(ctx.rev()):
2763 if displayer.flush(ctx.rev()):
2764 count += 1
2764 count += 1
2765 displayer.close()
2765 displayer.close()
2766
2766
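# Illustrative usage (hypothetical keyword and date): combining the keyword,
# date and branch filters applied by prep() above:
#
#   $ hg log -k encoding -d '>2011-01-01' -b default --stat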
2767 def manifest(ui, repo, node=None, rev=None):
2767 def manifest(ui, repo, node=None, rev=None):
2768 """output the current or given revision of the project manifest
2768 """output the current or given revision of the project manifest
2769
2769
2770 Print a list of version controlled files for the given revision.
2770 Print a list of version controlled files for the given revision.
2771 If no revision is given, the first parent of the working directory
2771 If no revision is given, the first parent of the working directory
2772 is used, or the null revision if no revision is checked out.
2772 is used, or the null revision if no revision is checked out.
2773
2773
2774 With -v, print file permissions, symlink and executable bits.
2774 With -v, print file permissions, symlink and executable bits.
2775 With --debug, print file revision hashes.
2775 With --debug, print file revision hashes.
2776
2776
2777 Returns 0 on success.
2777 Returns 0 on success.
2778 """
2778 """
2779
2779
2780 if rev and node:
2780 if rev and node:
2781 raise util.Abort(_("please specify just one revision"))
2781 raise util.Abort(_("please specify just one revision"))
2782
2782
2783 if not node:
2783 if not node:
2784 node = rev
2784 node = rev
2785
2785
2786 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2786 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2787 ctx = cmdutil.revsingle(repo, node)
2787 ctx = cmdutil.revsingle(repo, node)
2788 for f in ctx:
2788 for f in ctx:
2789 if ui.debugflag:
2789 if ui.debugflag:
2790 ui.write("%40s " % hex(ctx.manifest()[f]))
2790 ui.write("%40s " % hex(ctx.manifest()[f]))
2791 if ui.verbose:
2791 if ui.verbose:
2792 ui.write(decor[ctx.flags(f)])
2792 ui.write(decor[ctx.flags(f)])
2793 ui.write("%s\n" % f)
2793 ui.write("%s\n" % f)
2794
2794
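# Illustrative output (hypothetical file names): with -v each line is prefixed
# via the ``decor`` table above, i.e. mode plus '*' for executables and '@'
# for symlinks:
#
#   644 README
#   755 * setup.sh
#   644 @ docs-link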
2795 def merge(ui, repo, node=None, **opts):
2795 def merge(ui, repo, node=None, **opts):
2796 """merge working directory with another revision
2796 """merge working directory with another revision
2797
2797
2798 The current working directory is updated with all changes made in
2798 The current working directory is updated with all changes made in
2799 the requested revision since the last common predecessor revision.
2799 the requested revision since the last common predecessor revision.
2800
2800
2801 Files that changed between either parent are marked as changed for
2801 Files that changed between either parent are marked as changed for
2802 the next commit and a commit must be performed before any further
2802 the next commit and a commit must be performed before any further
2803 updates to the repository are allowed. The next commit will have
2803 updates to the repository are allowed. The next commit will have
2804 two parents.
2804 two parents.
2805
2805
2806 ``--tool`` can be used to specify the merge tool used for file
2806 ``--tool`` can be used to specify the merge tool used for file
2807 merges. It overrides the HGMERGE environment variable and your
2807 merges. It overrides the HGMERGE environment variable and your
2808 configuration files. See :hg:`help merge-tools` for options.
2808 configuration files. See :hg:`help merge-tools` for options.
2809
2809
2810 If no revision is specified, the working directory's parent is a
2810 If no revision is specified, the working directory's parent is a
2811 head revision, and the current branch contains exactly one other
2811 head revision, and the current branch contains exactly one other
2812 head, the other head is merged by default. Otherwise, an
2812 head, the other head is merged by default. Otherwise, an
2813 explicit revision with which to merge must be provided.
2813 explicit revision with which to merge must be provided.
2814
2814
2815 :hg:`resolve` must be used to resolve unresolved files.
2815 :hg:`resolve` must be used to resolve unresolved files.
2816
2816
2817 To undo an uncommitted merge, use :hg:`update --clean .` which
2817 To undo an uncommitted merge, use :hg:`update --clean .` which
2818 will check out a clean copy of the original merge parent, losing
2818 will check out a clean copy of the original merge parent, losing
2819 all changes.
2819 all changes.
2820
2820
2821 Returns 0 on success, 1 if there are unresolved files.
2821 Returns 0 on success, 1 if there are unresolved files.
2822 """
2822 """
2823
2823
2824 if opts.get('rev') and node:
2824 if opts.get('rev') and node:
2825 raise util.Abort(_("please specify just one revision"))
2825 raise util.Abort(_("please specify just one revision"))
2826 if not node:
2826 if not node:
2827 node = opts.get('rev')
2827 node = opts.get('rev')
2828
2828
2829 if not node:
2829 if not node:
2830 branch = repo[None].branch()
2830 branch = repo[None].branch()
2831 bheads = repo.branchheads(branch)
2831 bheads = repo.branchheads(branch)
2832 if len(bheads) > 2:
2832 if len(bheads) > 2:
2833 raise util.Abort(_(
2833 raise util.Abort(_(
2834 'branch \'%s\' has %d heads - '
2834 'branch \'%s\' has %d heads - '
2835 'please merge with an explicit rev\n'
2835 'please merge with an explicit rev\n'
2836 '(run \'hg heads .\' to see heads)')
2836 '(run \'hg heads .\' to see heads)')
2837 % (branch, len(bheads)))
2837 % (branch, len(bheads)))
2838
2838
2839 parent = repo.dirstate.p1()
2839 parent = repo.dirstate.p1()
2840 if len(bheads) == 1:
2840 if len(bheads) == 1:
2841 if len(repo.heads()) > 1:
2841 if len(repo.heads()) > 1:
2842 raise util.Abort(_(
2842 raise util.Abort(_(
2843 'branch \'%s\' has one head - '
2843 'branch \'%s\' has one head - '
2844 'please merge with an explicit rev\n'
2844 'please merge with an explicit rev\n'
2845 '(run \'hg heads\' to see all heads)')
2845 '(run \'hg heads\' to see all heads)')
2846 % branch)
2846 % branch)
2847 msg = _('there is nothing to merge')
2847 msg = _('there is nothing to merge')
2848 if parent != repo.lookup(repo[None].branch()):
2848 if parent != repo.lookup(repo[None].branch()):
2849 msg = _('%s - use "hg update" instead') % msg
2849 msg = _('%s - use "hg update" instead') % msg
2850 raise util.Abort(msg)
2850 raise util.Abort(msg)
2851
2851
2852 if parent not in bheads:
2852 if parent not in bheads:
2853 raise util.Abort(_('working dir not at a head rev - '
2853 raise util.Abort(_('working dir not at a head rev - '
2854 'use "hg update" or merge with an explicit rev'))
2854 'use "hg update" or merge with an explicit rev'))
2855 node = parent == bheads[0] and bheads[-1] or bheads[0]
2855 node = parent == bheads[0] and bheads[-1] or bheads[0]
2856 else:
2856 else:
2857 node = cmdutil.revsingle(repo, node).node()
2857 node = cmdutil.revsingle(repo, node).node()
2858
2858
2859 if opts.get('preview'):
2859 if opts.get('preview'):
2860 # find nodes that are ancestors of p2 but not of p1
2860 # find nodes that are ancestors of p2 but not of p1
2861 p1 = repo.lookup('.')
2861 p1 = repo.lookup('.')
2862 p2 = repo.lookup(node)
2862 p2 = repo.lookup(node)
2863 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2863 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2864
2864
2865 displayer = cmdutil.show_changeset(ui, repo, opts)
2865 displayer = cmdutil.show_changeset(ui, repo, opts)
2866 for node in nodes:
2866 for node in nodes:
2867 displayer.show(repo[node])
2867 displayer.show(repo[node])
2868 displayer.close()
2868 displayer.close()
2869 return 0
2869 return 0
2870
2870
2871 try:
2871 try:
2872 # ui.forcemerge is an internal variable, do not document
2872 # ui.forcemerge is an internal variable, do not document
2873 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2873 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2874 return hg.merge(repo, node, force=opts.get('force'))
2874 return hg.merge(repo, node, force=opts.get('force'))
2875 finally:
2875 finally:
2876 ui.setconfig('ui', 'forcemerge', '')
2876 ui.setconfig('ui', 'forcemerge', '')
2877
2877
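# Illustrative usage: previewing the changesets a merge would bring in, then
# merging with an explicit tool; ui.forcemerge is set only for the duration of
# hg.merge() (see the try/finally above):
#
#   $ hg merge --preview
#   $ hg merge --tool internal:merge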
2878 def outgoing(ui, repo, dest=None, **opts):
2878 def outgoing(ui, repo, dest=None, **opts):
2879 """show changesets not found in the destination
2879 """show changesets not found in the destination
2880
2880
2881 Show changesets not found in the specified destination repository
2881 Show changesets not found in the specified destination repository
2882 or the default push location. These are the changesets that would
2882 or the default push location. These are the changesets that would
2883 be pushed if a push was requested.
2883 be pushed if a push was requested.
2884
2884
2885 See pull for details of valid destination formats.
2885 See pull for details of valid destination formats.
2886
2886
2887 Returns 0 if there are outgoing changes, 1 otherwise.
2887 Returns 0 if there are outgoing changes, 1 otherwise.
2888 """
2888 """
2889
2889
2890 if opts.get('bookmarks'):
2890 if opts.get('bookmarks'):
2891 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2891 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2892 dest, branches = hg.parseurl(dest, opts.get('branch'))
2892 dest, branches = hg.parseurl(dest, opts.get('branch'))
2893 other = hg.repository(hg.remoteui(repo, opts), dest)
2893 other = hg.repository(hg.remoteui(repo, opts), dest)
2894 if 'bookmarks' not in other.listkeys('namespaces'):
2894 if 'bookmarks' not in other.listkeys('namespaces'):
2895 ui.warn(_("remote doesn't support bookmarks\n"))
2895 ui.warn(_("remote doesn't support bookmarks\n"))
2896 return 0
2896 return 0
2897 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2897 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
2898 return bookmarks.diff(ui, other, repo)
2898 return bookmarks.diff(ui, other, repo)
2899
2899
2900 ret = hg.outgoing(ui, repo, dest, opts)
2900 ret = hg.outgoing(ui, repo, dest, opts)
2901 return ret
2901 return ret
2902
2902
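# Illustrative usage: listing what a push would publish, either as changesets
# or, with --bookmarks, as a comparison of bookmark state:
#
#   $ hg outgoing
#   $ hg outgoing --bookmarks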
2903 def parents(ui, repo, file_=None, **opts):
2903 def parents(ui, repo, file_=None, **opts):
2904 """show the parents of the working directory or revision
2904 """show the parents of the working directory or revision
2905
2905
2906 Print the working directory's parent revisions. If a revision is
2906 Print the working directory's parent revisions. If a revision is
2907 given via -r/--rev, the parent of that revision will be printed.
2907 given via -r/--rev, the parent of that revision will be printed.
2908 If a file argument is given, the revision in which the file was
2908 If a file argument is given, the revision in which the file was
2909 last changed (before the working directory revision or the
2909 last changed (before the working directory revision or the
2910 argument to --rev if given) is printed.
2910 argument to --rev if given) is printed.
2911
2911
2912 Returns 0 on success.
2912 Returns 0 on success.
2913 """
2913 """
2914
2914
2915 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
2915 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
2916
2916
2917 if file_:
2917 if file_:
2918 m = cmdutil.match(repo, (file_,), opts)
2918 m = cmdutil.match(repo, (file_,), opts)
2919 if m.anypats() or len(m.files()) != 1:
2919 if m.anypats() or len(m.files()) != 1:
2920 raise util.Abort(_('can only specify an explicit filename'))
2920 raise util.Abort(_('can only specify an explicit filename'))
2921 file_ = m.files()[0]
2921 file_ = m.files()[0]
2922 filenodes = []
2922 filenodes = []
2923 for cp in ctx.parents():
2923 for cp in ctx.parents():
2924 if not cp:
2924 if not cp:
2925 continue
2925 continue
2926 try:
2926 try:
2927 filenodes.append(cp.filenode(file_))
2927 filenodes.append(cp.filenode(file_))
2928 except error.LookupError:
2928 except error.LookupError:
2929 pass
2929 pass
2930 if not filenodes:
2930 if not filenodes:
2931 raise util.Abort(_("'%s' not found in manifest!") % file_)
2931 raise util.Abort(_("'%s' not found in manifest!") % file_)
2932 fl = repo.file(file_)
2932 fl = repo.file(file_)
2933 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2933 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2934 else:
2934 else:
2935 p = [cp.node() for cp in ctx.parents()]
2935 p = [cp.node() for cp in ctx.parents()]
2936
2936
2937 displayer = cmdutil.show_changeset(ui, repo, opts)
2937 displayer = cmdutil.show_changeset(ui, repo, opts)
2938 for n in p:
2938 for n in p:
2939 if n != nullid:
2939 if n != nullid:
2940 displayer.show(repo[n])
2940 displayer.show(repo[n])
2941 displayer.close()
2941 displayer.close()
2942
2942
2943 def paths(ui, repo, search=None):
2943 def paths(ui, repo, search=None):
2944 """show aliases for remote repositories
2944 """show aliases for remote repositories
2945
2945
2946 Show definition of symbolic path name NAME. If no name is given,
2946 Show definition of symbolic path name NAME. If no name is given,
2947 show definition of all available names.
2947 show definition of all available names.
2948
2948
2949 Path names are defined in the [paths] section of your
2949 Path names are defined in the [paths] section of your
2950 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
2950 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
2951 repository, ``.hg/hgrc`` is used, too.
2951 repository, ``.hg/hgrc`` is used, too.
2952
2952
2953 The path names ``default`` and ``default-push`` have a special
2953 The path names ``default`` and ``default-push`` have a special
2954 meaning. When performing a push or pull operation, they are used
2954 meaning. When performing a push or pull operation, they are used
2955 as fallbacks if no location is specified on the command-line.
2955 as fallbacks if no location is specified on the command-line.
2956 When ``default-push`` is set, it will be used for push and
2956 When ``default-push`` is set, it will be used for push and
2957 ``default`` will be used for pull; otherwise ``default`` is used
2957 ``default`` will be used for pull; otherwise ``default`` is used
2958 as the fallback for both. When cloning a repository, the clone
2958 as the fallback for both. When cloning a repository, the clone
2959 source is written as ``default`` in ``.hg/hgrc``. Note that
2959 source is written as ``default`` in ``.hg/hgrc``. Note that
2960 ``default`` and ``default-push`` apply to all inbound (e.g.
2960 ``default`` and ``default-push`` apply to all inbound (e.g.
2961 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2961 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2962 :hg:`bundle`) operations.
2962 :hg:`bundle`) operations.
2963
2963
2964 See :hg:`help urls` for more information.
2964 See :hg:`help urls` for more information.
2965
2965
2966 Returns 0 on success.
2966 Returns 0 on success.
2967 """
2967 """
2968 if search:
2968 if search:
2969 for name, path in ui.configitems("paths"):
2969 for name, path in ui.configitems("paths"):
2970 if name == search:
2970 if name == search:
2971 ui.write("%s\n" % url.hidepassword(path))
2971 ui.write("%s\n" % util.hidepassword(path))
2972 return
2972 return
2973 ui.warn(_("not found!\n"))
2973 ui.warn(_("not found!\n"))
2974 return 1
2974 return 1
2975 else:
2975 else:
2976 for name, path in ui.configitems("paths"):
2976 for name, path in ui.configitems("paths"):
2977 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2977 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
2978
2978
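# Illustrative configuration (example host names): the aliases printed above
# come from the [paths] section, e.g. in .hg/hgrc:
#
#   [paths]
#   default      = https://user:secret@hg.example.com/repo
#   default-push = ssh://hg@hg.example.com/repo
#
# hidepassword() masks the credential part when an alias is displayed.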
2979 def postincoming(ui, repo, modheads, optupdate, checkout):
2979 def postincoming(ui, repo, modheads, optupdate, checkout):
2980 if modheads == 0:
2980 if modheads == 0:
2981 return
2981 return
2982 if optupdate:
2982 if optupdate:
2983 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2983 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2984 return hg.update(repo, checkout)
2984 return hg.update(repo, checkout)
2985 else:
2985 else:
2986 ui.status(_("not updating, since new heads added\n"))
2986 ui.status(_("not updating, since new heads added\n"))
2987 if modheads > 1:
2987 if modheads > 1:
2988 currentbranchheads = len(repo.branchheads())
2988 currentbranchheads = len(repo.branchheads())
2989 if currentbranchheads == modheads:
2989 if currentbranchheads == modheads:
2990 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2990 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2991 elif currentbranchheads > 1:
2991 elif currentbranchheads > 1:
2992 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
2992 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
2993 else:
2993 else:
2994 ui.status(_("(run 'hg heads' to see heads)\n"))
2994 ui.status(_("(run 'hg heads' to see heads)\n"))
2995 else:
2995 else:
2996 ui.status(_("(run 'hg update' to get a working copy)\n"))
2996 ui.status(_("(run 'hg update' to get a working copy)\n"))
2997
2997
2998 def pull(ui, repo, source="default", **opts):
2998 def pull(ui, repo, source="default", **opts):
2999 """pull changes from the specified source
2999 """pull changes from the specified source
3000
3000
3001 Pull changes from a remote repository to a local one.
3001 Pull changes from a remote repository to a local one.
3002
3002
3003 This finds all changes from the repository at the specified path
3003 This finds all changes from the repository at the specified path
3004 or URL and adds them to a local repository (the current one unless
3004 or URL and adds them to a local repository (the current one unless
3005 -R is specified). By default, this does not update the copy of the
3005 -R is specified). By default, this does not update the copy of the
3006 project in the working directory.
3006 project in the working directory.
3007
3007
3008 Use :hg:`incoming` if you want to see what would have been added
3008 Use :hg:`incoming` if you want to see what would have been added
3009 by a pull at the time you issued this command. If you then decide
3009 by a pull at the time you issued this command. If you then decide
3010 to add those changes to the repository, you should use :hg:`pull
3010 to add those changes to the repository, you should use :hg:`pull
3011 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3011 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3012
3012
3013 If SOURCE is omitted, the 'default' path will be used.
3013 If SOURCE is omitted, the 'default' path will be used.
3014 See :hg:`help urls` for more information.
3014 See :hg:`help urls` for more information.
3015
3015
3016 Returns 0 on success, 1 if an update had unresolved files.
3016 Returns 0 on success, 1 if an update had unresolved files.
3017 """
3017 """
3018 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3018 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3019 other = hg.repository(hg.remoteui(repo, opts), source)
3019 other = hg.repository(hg.remoteui(repo, opts), source)
3020 ui.status(_('pulling from %s\n') % url.hidepassword(source))
3020 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3021 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3021 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3022
3022
3023 if opts.get('bookmark'):
3023 if opts.get('bookmark'):
3024 if not revs:
3024 if not revs:
3025 revs = []
3025 revs = []
3026 rb = other.listkeys('bookmarks')
3026 rb = other.listkeys('bookmarks')
3027 for b in opts['bookmark']:
3027 for b in opts['bookmark']:
3028 if b not in rb:
3028 if b not in rb:
3029 raise util.Abort(_('remote bookmark %s not found!') % b)
3029 raise util.Abort(_('remote bookmark %s not found!') % b)
3030 revs.append(rb[b])
3030 revs.append(rb[b])
3031
3031
3032 if revs:
3032 if revs:
3033 try:
3033 try:
3034 revs = [other.lookup(rev) for rev in revs]
3034 revs = [other.lookup(rev) for rev in revs]
3035 except error.CapabilityError:
3035 except error.CapabilityError:
3036 err = _("other repository doesn't support revision lookup, "
3036 err = _("other repository doesn't support revision lookup, "
3037 "so a rev cannot be specified.")
3037 "so a rev cannot be specified.")
3038 raise util.Abort(err)
3038 raise util.Abort(err)
3039
3039
3040 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3040 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3041 bookmarks.updatefromremote(ui, repo, other)
3041 bookmarks.updatefromremote(ui, repo, other)
3042 if checkout:
3042 if checkout:
3043 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3043 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3044 repo._subtoppath = source
3044 repo._subtoppath = source
3045 try:
3045 try:
3046 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3046 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3047
3047
3048 finally:
3048 finally:
3049 del repo._subtoppath
3049 del repo._subtoppath
3050
3050
3051 # update specified bookmarks
3051 # update specified bookmarks
3052 if opts.get('bookmark'):
3052 if opts.get('bookmark'):
3053 for b in opts['bookmark']:
3053 for b in opts['bookmark']:
3054 # explicit pull overrides local bookmark if any
3054 # explicit pull overrides local bookmark if any
3055 ui.status(_("importing bookmark %s\n") % b)
3055 ui.status(_("importing bookmark %s\n") % b)
3056 repo._bookmarks[b] = repo[rb[b]].node()
3056 repo._bookmarks[b] = repo[rb[b]].node()
3057 bookmarks.write(repo)
3057 bookmarks.write(repo)
3058
3058
3059 return ret
3059 return ret
3060
3060
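# Illustrative usage (hypothetical bookmark name): pulling only the changesets
# reachable from a remote bookmark and importing that bookmark locally:
#
#   $ hg pull --bookmark feature-x https://hg.example.com/repo
#   pulling from https://hg.example.com/repo
#   ...
#   importing bookmark feature-x
#
# A bookmark missing on the remote aborts with "remote bookmark ... not found!".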
3061 def push(ui, repo, dest=None, **opts):
3061 def push(ui, repo, dest=None, **opts):
3062 """push changes to the specified destination
3062 """push changes to the specified destination
3063
3063
3064 Push changesets from the local repository to the specified
3064 Push changesets from the local repository to the specified
3065 destination.
3065 destination.
3066
3066
3067 This operation is symmetrical to pull: it is identical to a pull
3067 This operation is symmetrical to pull: it is identical to a pull
3068 in the destination repository from the current one.
3068 in the destination repository from the current one.
3069
3069
3070 By default, push will not allow creation of new heads at the
3070 By default, push will not allow creation of new heads at the
3071 destination, since multiple heads would make it unclear which head
3071 destination, since multiple heads would make it unclear which head
3072 to use. In this situation, it is recommended to pull and merge
3072 to use. In this situation, it is recommended to pull and merge
3073 before pushing.
3073 before pushing.
3074
3074
3075 Use --new-branch if you want to allow push to create a new named
3075 Use --new-branch if you want to allow push to create a new named
3076 branch that is not present at the destination. This allows you to
3076 branch that is not present at the destination. This allows you to
3077 only create a new branch without forcing other changes.
3077 only create a new branch without forcing other changes.
3078
3078
3079 Use -f/--force to override the default behavior and push all
3079 Use -f/--force to override the default behavior and push all
3080 changesets on all branches.
3080 changesets on all branches.
3081
3081
3082 If -r/--rev is used, the specified revision and all its ancestors
3082 If -r/--rev is used, the specified revision and all its ancestors
3083 will be pushed to the remote repository.
3083 will be pushed to the remote repository.
3084
3084
3085 Please see :hg:`help urls` for important details about ``ssh://``
3085 Please see :hg:`help urls` for important details about ``ssh://``
3086 URLs. If DESTINATION is omitted, a default path will be used.
3086 URLs. If DESTINATION is omitted, a default path will be used.
3087
3087
3088 Returns 0 if push was successful, 1 if nothing to push.
3088 Returns 0 if push was successful, 1 if nothing to push.
3089 """
3089 """
3090
3090
3091 if opts.get('bookmark'):
3091 if opts.get('bookmark'):
3092 for b in opts['bookmark']:
3092 for b in opts['bookmark']:
3093 # translate -B options to -r so changesets get pushed
3093 # translate -B options to -r so changesets get pushed
3094 if b in repo._bookmarks:
3094 if b in repo._bookmarks:
3095 opts.setdefault('rev', []).append(b)
3095 opts.setdefault('rev', []).append(b)
3096 else:
3096 else:
3097 # if we try to push a deleted bookmark, translate it to null
3097 # if we try to push a deleted bookmark, translate it to null
3098 # this lets simultaneous -r, -b options continue working
3098 # this lets simultaneous -r, -b options continue working
3099 opts.setdefault('rev', []).append("null")
3099 opts.setdefault('rev', []).append("null")
3100
3100
3101 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3101 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3102 dest, branches = hg.parseurl(dest, opts.get('branch'))
3102 dest, branches = hg.parseurl(dest, opts.get('branch'))
3103 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
3103 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
3104 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3104 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3105 other = hg.repository(hg.remoteui(repo, opts), dest)
3105 other = hg.repository(hg.remoteui(repo, opts), dest)
3106 if revs:
3106 if revs:
3107 revs = [repo.lookup(rev) for rev in revs]
3107 revs = [repo.lookup(rev) for rev in revs]
3108
3108
3109 repo._subtoppath = dest
3109 repo._subtoppath = dest
3110 try:
3110 try:
3111 # push subrepos depth-first for coherent ordering
3111 # push subrepos depth-first for coherent ordering
3112 c = repo['']
3112 c = repo['']
3113 subs = c.substate # only repos that are committed
3113 subs = c.substate # only repos that are committed
3114 for s in sorted(subs):
3114 for s in sorted(subs):
3115 if not c.sub(s).push(opts.get('force')):
3115 if not c.sub(s).push(opts.get('force')):
3116 return False
3116 return False
3117 finally:
3117 finally:
3118 del repo._subtoppath
3118 del repo._subtoppath
3119 result = repo.push(other, opts.get('force'), revs=revs,
3119 result = repo.push(other, opts.get('force'), revs=revs,
3120 newbranch=opts.get('new_branch'))
3120 newbranch=opts.get('new_branch'))
3121
3121
3122 result = (result == 0)
3122 result = (result == 0)
3123
3123
3124 if opts.get('bookmark'):
3124 if opts.get('bookmark'):
3125 rb = other.listkeys('bookmarks')
3125 rb = other.listkeys('bookmarks')
3126 for b in opts['bookmark']:
3126 for b in opts['bookmark']:
3127 # explicit push overrides remote bookmark if any
3127 # explicit push overrides remote bookmark if any
3128 if b in repo._bookmarks:
3128 if b in repo._bookmarks:
3129 ui.status(_("exporting bookmark %s\n") % b)
3129 ui.status(_("exporting bookmark %s\n") % b)
3130 new = repo[b].hex()
3130 new = repo[b].hex()
3131 elif b in rb:
3131 elif b in rb:
3132 ui.status(_("deleting remote bookmark %s\n") % b)
3132 ui.status(_("deleting remote bookmark %s\n") % b)
3133 new = '' # delete
3133 new = '' # delete
3134 else:
3134 else:
3135 ui.warn(_('bookmark %s does not exist on the local '
3135 ui.warn(_('bookmark %s does not exist on the local '
3136 'or remote repository!\n') % b)
3136 'or remote repository!\n') % b)
3137 return 2
3137 return 2
3138 old = rb.get(b, '')
3138 old = rb.get(b, '')
3139 r = other.pushkey('bookmarks', b, old, new)
3139 r = other.pushkey('bookmarks', b, old, new)
3140 if not r:
3140 if not r:
3141 ui.warn(_('updating bookmark %s failed!\n') % b)
3141 ui.warn(_('updating bookmark %s failed!\n') % b)
3142 if not result:
3142 if not result:
3143 result = 2
3143 result = 2
3144
3144
3145 return result
3145 return result
3146
3146
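# Editor's note (illustrative sketch, not part of commands.py): the bookmark
# handling in push above relies on pushkey's compare-and-swap contract -- the
# update only succeeds if the remote value still equals the "old" value read
# beforehand, and an empty "new" value requests deletion.  The dict below is a
# made-up stand-in for the remote bookmark namespace, not Mercurial API.

def pushkey(namespace, key, old, new):
    """Set namespace[key] to new only if it still equals old."""
    if namespace.get(key, '') != old:
        return False                    # somebody moved the bookmark meanwhile
    if new == '':
        namespace.pop(key, None)        # empty new value means deletion
    else:
        namespace[key] = new
    return True

remote = {'feature': 'abc123'}
assert pushkey(remote, 'feature', 'abc123', 'def456')    # normal update
assert not pushkey(remote, 'feature', 'abc123', 'fff')   # stale old value, refused
assert pushkey(remote, 'feature', 'def456', '')          # deletion
assert 'feature' not in remote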
3147 def recover(ui, repo):
3147 def recover(ui, repo):
3148 """roll back an interrupted transaction
3148 """roll back an interrupted transaction
3149
3149
3150 Recover from an interrupted commit or pull.
3150 Recover from an interrupted commit or pull.
3151
3151
3152 This command tries to fix the repository status after an
3152 This command tries to fix the repository status after an
3153 interrupted operation. It should only be necessary when Mercurial
3153 interrupted operation. It should only be necessary when Mercurial
3154 suggests it.
3154 suggests it.
3155
3155
3156 Returns 0 if successful, 1 if nothing to recover or verify fails.
3156 Returns 0 if successful, 1 if nothing to recover or verify fails.
3157 """
3157 """
3158 if repo.recover():
3158 if repo.recover():
3159 return hg.verify(repo)
3159 return hg.verify(repo)
3160 return 1
3160 return 1
3161
3161
3162 def remove(ui, repo, *pats, **opts):
3162 def remove(ui, repo, *pats, **opts):
3163 """remove the specified files on the next commit
3163 """remove the specified files on the next commit
3164
3164
3165 Schedule the indicated files for removal from the repository.
3165 Schedule the indicated files for removal from the repository.
3166
3166
3167 This only removes files from the current branch, not from the
3167 This only removes files from the current branch, not from the
3168 entire project history. -A/--after can be used to remove only
3168 entire project history. -A/--after can be used to remove only
3169 files that have already been deleted, -f/--force can be used to
3169 files that have already been deleted, -f/--force can be used to
3170 force deletion, and -Af can be used to remove files from the next
3170 force deletion, and -Af can be used to remove files from the next
3171 revision without deleting them from the working directory.
3171 revision without deleting them from the working directory.
3172
3172
3173 The following table details the behavior of remove for different
3173 The following table details the behavior of remove for different
3174 file states (columns) and option combinations (rows). The file
3174 file states (columns) and option combinations (rows). The file
3175 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3175 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3176 reported by :hg:`status`). The actions are Warn, Remove (from
3176 reported by :hg:`status`). The actions are Warn, Remove (from
3177 branch) and Delete (from disk)::
3177 branch) and Delete (from disk)::
3178
3178
3179 A C M !
3179 A C M !
3180 none W RD W R
3180 none W RD W R
3181 -f R RD RD R
3181 -f R RD RD R
3182 -A W W W R
3182 -A W W W R
3183 -Af R R R R
3183 -Af R R R R
3184
3184
3185 This command schedules the files to be removed at the next commit.
3185 This command schedules the files to be removed at the next commit.
3186 To undo a remove before that, see :hg:`revert`.
3186 To undo a remove before that, see :hg:`revert`.
3187
3187
3188 Returns 0 on success, 1 if any warnings encountered.
3188 Returns 0 on success, 1 if any warnings encountered.
3189 """
3189 """
3190
3190
3191 ret = 0
3191 ret = 0
3192 after, force = opts.get('after'), opts.get('force')
3192 after, force = opts.get('after'), opts.get('force')
3193 if not pats and not after:
3193 if not pats and not after:
3194 raise util.Abort(_('no files specified'))
3194 raise util.Abort(_('no files specified'))
3195
3195
3196 m = cmdutil.match(repo, pats, opts)
3196 m = cmdutil.match(repo, pats, opts)
3197 s = repo.status(match=m, clean=True)
3197 s = repo.status(match=m, clean=True)
3198 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3198 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3199
3199
3200 for f in m.files():
3200 for f in m.files():
3201 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3201 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3202 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3202 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3203 ret = 1
3203 ret = 1
3204
3204
3205 if force:
3205 if force:
3206 remove, forget = modified + deleted + clean, added
3206 remove, forget = modified + deleted + clean, added
3207 elif after:
3207 elif after:
3208 remove, forget = deleted, []
3208 remove, forget = deleted, []
3209 for f in modified + added + clean:
3209 for f in modified + added + clean:
3210 ui.warn(_('not removing %s: file still exists (use -f'
3210 ui.warn(_('not removing %s: file still exists (use -f'
3211 ' to force removal)\n') % m.rel(f))
3211 ' to force removal)\n') % m.rel(f))
3212 ret = 1
3212 ret = 1
3213 else:
3213 else:
3214 remove, forget = deleted + clean, []
3214 remove, forget = deleted + clean, []
3215 for f in modified:
3215 for f in modified:
3216 ui.warn(_('not removing %s: file is modified (use -f'
3216 ui.warn(_('not removing %s: file is modified (use -f'
3217 ' to force removal)\n') % m.rel(f))
3217 ' to force removal)\n') % m.rel(f))
3218 ret = 1
3218 ret = 1
3219 for f in added:
3219 for f in added:
3220 ui.warn(_('not removing %s: file has been marked for add (use -f'
3220 ui.warn(_('not removing %s: file has been marked for add (use -f'
3221 ' to force removal)\n') % m.rel(f))
3221 ' to force removal)\n') % m.rel(f))
3222 ret = 1
3222 ret = 1
3223
3223
3224 for f in sorted(remove + forget):
3224 for f in sorted(remove + forget):
3225 if ui.verbose or not m.exact(f):
3225 if ui.verbose or not m.exact(f):
3226 ui.status(_('removing %s\n') % m.rel(f))
3226 ui.status(_('removing %s\n') % m.rel(f))
3227
3227
3228 repo[None].forget(forget)
3228 repo[None].forget(forget)
3229 repo[None].remove(remove, unlink=not after)
3229 repo[None].remove(remove, unlink=not after)
3230 return ret
3230 return ret
3231
3231
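# Editor's note (standalone sketch, assumptions labelled): the option/state
# table from the remove docstring above restated as plain Python, with
# W = warn, R = remove from branch, D = delete from disk.  No Mercurial
# imports; the helper name remove_action is made up for illustration.

TABLE = {
    #             A          C           M           !
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

def remove_action(opts, state):
    return TABLE[opts][state]

assert remove_action('none', 'C') == 'RD'   # clean file: removed and deleted
assert remove_action('-Af', 'M') == 'R'     # kept on disk, dropped from branch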
3232 def rename(ui, repo, *pats, **opts):
3232 def rename(ui, repo, *pats, **opts):
3233 """rename files; equivalent of copy + remove
3233 """rename files; equivalent of copy + remove
3234
3234
3235 Mark dest as copies of sources; mark sources for deletion. If dest
3235 Mark dest as copies of sources; mark sources for deletion. If dest
3236 is a directory, copies are put in that directory. If dest is a
3236 is a directory, copies are put in that directory. If dest is a
3237 file, there can only be one source.
3237 file, there can only be one source.
3238
3238
3239 By default, this command copies the contents of files as they
3239 By default, this command copies the contents of files as they
3240 exist in the working directory. If invoked with -A/--after, the
3240 exist in the working directory. If invoked with -A/--after, the
3241 operation is recorded, but no copying is performed.
3241 operation is recorded, but no copying is performed.
3242
3242
3243 This command takes effect at the next commit. To undo a rename
3243 This command takes effect at the next commit. To undo a rename
3244 before that, see :hg:`revert`.
3244 before that, see :hg:`revert`.
3245
3245
3246 Returns 0 on success, 1 if errors are encountered.
3246 Returns 0 on success, 1 if errors are encountered.
3247 """
3247 """
3248 wlock = repo.wlock(False)
3248 wlock = repo.wlock(False)
3249 try:
3249 try:
3250 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3250 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3251 finally:
3251 finally:
3252 wlock.release()
3252 wlock.release()
3253
3253
3254 def resolve(ui, repo, *pats, **opts):
3254 def resolve(ui, repo, *pats, **opts):
3255 """redo merges or set/view the merge status of files
3255 """redo merges or set/view the merge status of files
3256
3256
3257 Merges with unresolved conflicts are often the result of
3257 Merges with unresolved conflicts are often the result of
3258 non-interactive merging using the ``internal:merge`` configuration
3258 non-interactive merging using the ``internal:merge`` configuration
3259 setting, or a command-line merge tool like ``diff3``. The resolve
3259 setting, or a command-line merge tool like ``diff3``. The resolve
3260 command is used to manage the files involved in a merge, after
3260 command is used to manage the files involved in a merge, after
3261 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3261 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3262 working directory must have two parents).
3262 working directory must have two parents).
3263
3263
3264 The resolve command can be used in the following ways:
3264 The resolve command can be used in the following ways:
3265
3265
3266 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3266 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3267 files, discarding any previous merge attempts. Re-merging is not
3267 files, discarding any previous merge attempts. Re-merging is not
3268 performed for files already marked as resolved. Use ``--all/-a``
3268 performed for files already marked as resolved. Use ``--all/-a``
3269 to select all unresolved files. ``--tool`` can be used to specify
3269 to select all unresolved files. ``--tool`` can be used to specify
3270 the merge tool used for the given files. It overrides the HGMERGE
3270 the merge tool used for the given files. It overrides the HGMERGE
3271 environment variable and your configuration files.
3271 environment variable and your configuration files.
3272
3272
3273 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3273 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3274 (e.g. after having manually fixed-up the files). The default is
3274 (e.g. after having manually fixed-up the files). The default is
3275 to mark all unresolved files.
3275 to mark all unresolved files.
3276
3276
3277 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3277 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3278 default is to mark all resolved files.
3278 default is to mark all resolved files.
3279
3279
3280 - :hg:`resolve -l`: list files which had or still have conflicts.
3280 - :hg:`resolve -l`: list files which had or still have conflicts.
3281 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3281 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3282
3282
3283 Note that Mercurial will not let you commit files with unresolved
3283 Note that Mercurial will not let you commit files with unresolved
3284 merge conflicts. You must use :hg:`resolve -m ...` before you can
3284 merge conflicts. You must use :hg:`resolve -m ...` before you can
3285 commit after a conflicting merge.
3285 commit after a conflicting merge.
3286
3286
3287 Returns 0 on success, 1 if any files fail a resolve attempt.
3287 Returns 0 on success, 1 if any files fail a resolve attempt.
3288 """
3288 """
3289
3289
3290 all, mark, unmark, show, nostatus = \
3290 all, mark, unmark, show, nostatus = \
3291 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3291 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3292
3292
3293 if (show and (mark or unmark)) or (mark and unmark):
3293 if (show and (mark or unmark)) or (mark and unmark):
3294 raise util.Abort(_("too many options specified"))
3294 raise util.Abort(_("too many options specified"))
3295 if pats and all:
3295 if pats and all:
3296 raise util.Abort(_("can't specify --all and patterns"))
3296 raise util.Abort(_("can't specify --all and patterns"))
3297 if not (all or pats or show or mark or unmark):
3297 if not (all or pats or show or mark or unmark):
3298 raise util.Abort(_('no files or directories specified; '
3298 raise util.Abort(_('no files or directories specified; '
3299 'use --all to remerge all files'))
3299 'use --all to remerge all files'))
3300
3300
3301 ms = mergemod.mergestate(repo)
3301 ms = mergemod.mergestate(repo)
3302 m = cmdutil.match(repo, pats, opts)
3302 m = cmdutil.match(repo, pats, opts)
3303 ret = 0
3303 ret = 0
3304
3304
3305 for f in ms:
3305 for f in ms:
3306 if m(f):
3306 if m(f):
3307 if show:
3307 if show:
3308 if nostatus:
3308 if nostatus:
3309 ui.write("%s\n" % f)
3309 ui.write("%s\n" % f)
3310 else:
3310 else:
3311 ui.write("%s %s\n" % (ms[f].upper(), f),
3311 ui.write("%s %s\n" % (ms[f].upper(), f),
3312 label='resolve.' +
3312 label='resolve.' +
3313 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3313 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3314 elif mark:
3314 elif mark:
3315 ms.mark(f, "r")
3315 ms.mark(f, "r")
3316 elif unmark:
3316 elif unmark:
3317 ms.mark(f, "u")
3317 ms.mark(f, "u")
3318 else:
3318 else:
3319 wctx = repo[None]
3319 wctx = repo[None]
3320 mctx = wctx.parents()[-1]
3320 mctx = wctx.parents()[-1]
3321
3321
3322 # backup pre-resolve (merge uses .orig for its own purposes)
3322 # backup pre-resolve (merge uses .orig for its own purposes)
3323 a = repo.wjoin(f)
3323 a = repo.wjoin(f)
3324 util.copyfile(a, a + ".resolve")
3324 util.copyfile(a, a + ".resolve")
3325
3325
3326 try:
3326 try:
3327 # resolve file
3327 # resolve file
3328 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3328 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3329 if ms.resolve(f, wctx, mctx):
3329 if ms.resolve(f, wctx, mctx):
3330 ret = 1
3330 ret = 1
3331 finally:
3331 finally:
3332 ui.setconfig('ui', 'forcemerge', '')
3332 ui.setconfig('ui', 'forcemerge', '')
3333
3333
3334 # replace filemerge's .orig file with our resolve file
3334 # replace filemerge's .orig file with our resolve file
3335 util.rename(a + ".resolve", a + ".orig")
3335 util.rename(a + ".resolve", a + ".orig")
3336
3336
3337 ms.commit()
3337 ms.commit()
3338 return ret
3338 return ret
3339
3339
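# Editor's note (self-contained sketch, temp files only): the per-file backup
# shuffle performed by resolve above -- the pre-resolve contents are stashed
# as <file>.resolve, the merge tool overwrites the file and may leave its own
# <file>.orig, and the stash then becomes the .orig backup so it reflects the
# state before "hg resolve" ran.  Nothing here touches a real repository.

import os, shutil, tempfile

d = tempfile.mkdtemp()
f = os.path.join(d, 'conflicted.txt')

with open(f, 'w') as fp:
    fp.write('pre-resolve contents with conflict markers\n')

shutil.copyfile(f, f + '.resolve')               # stash the pre-resolve state
with open(f + '.orig', 'w') as fp:               # what a merge tool might leave
    fp.write('tool backup\n')
with open(f, 'w') as fp:                         # simulated re-merge result
    fp.write('re-merged contents\n')

os.remove(f + '.orig')                           # drop the tool's backup...
os.rename(f + '.resolve', f + '.orig')           # ...our stash becomes .orig

with open(f + '.orig') as fp:
    assert fp.read().startswith('pre-resolve')
shutil.rmtree(d)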
3340 def revert(ui, repo, *pats, **opts):
3340 def revert(ui, repo, *pats, **opts):
3341 """restore individual files or directories to an earlier state
3341 """restore individual files or directories to an earlier state
3342
3342
3343 .. note::
3343 .. note::
3344 This command is most likely not what you are looking for.
3344 This command is most likely not what you are looking for.
3345 Revert will partially overwrite content in the working
3345 Revert will partially overwrite content in the working
3346 directory without changing the working directory parents. Use
3346 directory without changing the working directory parents. Use
3347 :hg:`update -r rev` to check out earlier revisions, or
3347 :hg:`update -r rev` to check out earlier revisions, or
3348 :hg:`update --clean .` to undo a merge which has added another
3348 :hg:`update --clean .` to undo a merge which has added another
3349 parent.
3349 parent.
3350
3350
3351 With no revision specified, revert the named files or directories
3351 With no revision specified, revert the named files or directories
3352 to the contents they had in the parent of the working directory.
3352 to the contents they had in the parent of the working directory.
3353 This restores the contents of the affected files to an unmodified
3353 This restores the contents of the affected files to an unmodified
3354 state and unschedules adds, removes, copies, and renames. If the
3354 state and unschedules adds, removes, copies, and renames. If the
3355 working directory has two parents, you must explicitly specify a
3355 working directory has two parents, you must explicitly specify a
3356 revision.
3356 revision.
3357
3357
3358 Using the -r/--rev option, revert the given files or directories
3358 Using the -r/--rev option, revert the given files or directories
3359 to their contents as of a specific revision. This can be helpful
3359 to their contents as of a specific revision. This can be helpful
3360 to "roll back" some or all of an earlier change. See :hg:`help
3360 to "roll back" some or all of an earlier change. See :hg:`help
3361 dates` for a list of formats valid for -d/--date.
3361 dates` for a list of formats valid for -d/--date.
3362
3362
3363 Revert modifies the working directory. It does not commit any
3363 Revert modifies the working directory. It does not commit any
3364 changes, or change the parent of the working directory. If you
3364 changes, or change the parent of the working directory. If you
3365 revert to a revision other than the parent of the working
3365 revert to a revision other than the parent of the working
3366 directory, the reverted files will thus appear modified
3366 directory, the reverted files will thus appear modified
3367 afterwards.
3367 afterwards.
3368
3368
3369 If a file has been deleted, it is restored. Files scheduled for
3369 If a file has been deleted, it is restored. Files scheduled for
3370 addition are just unscheduled and left as they are. If the
3370 addition are just unscheduled and left as they are. If the
3371 executable mode of a file was changed, it is reset.
3371 executable mode of a file was changed, it is reset.
3372
3372
3373 If names are given, all files matching the names are reverted.
3373 If names are given, all files matching the names are reverted.
3374 If no arguments are given, no files are reverted.
3374 If no arguments are given, no files are reverted.
3375
3375
3376 Modified files are saved with a .orig suffix before reverting.
3376 Modified files are saved with a .orig suffix before reverting.
3377 To disable these backups, use --no-backup.
3377 To disable these backups, use --no-backup.
3378
3378
3379 Returns 0 on success.
3379 Returns 0 on success.
3380 """
3380 """
3381
3381
3382 if opts.get("date"):
3382 if opts.get("date"):
3383 if opts.get("rev"):
3383 if opts.get("rev"):
3384 raise util.Abort(_("you can't specify a revision and a date"))
3384 raise util.Abort(_("you can't specify a revision and a date"))
3385 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3385 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3386
3386
3387 parent, p2 = repo.dirstate.parents()
3387 parent, p2 = repo.dirstate.parents()
3388 if not opts.get('rev') and p2 != nullid:
3388 if not opts.get('rev') and p2 != nullid:
3389 raise util.Abort(_('uncommitted merge - '
3389 raise util.Abort(_('uncommitted merge - '
3390 'use "hg update", see "hg help revert"'))
3390 'use "hg update", see "hg help revert"'))
3391
3391
3392 if not pats and not opts.get('all'):
3392 if not pats and not opts.get('all'):
3393 raise util.Abort(_('no files or directories specified; '
3393 raise util.Abort(_('no files or directories specified; '
3394 'use --all to revert the whole repo'))
3394 'use --all to revert the whole repo'))
3395
3395
3396 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3396 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3397 node = ctx.node()
3397 node = ctx.node()
3398 mf = ctx.manifest()
3398 mf = ctx.manifest()
3399 if node == parent:
3399 if node == parent:
3400 pmf = mf
3400 pmf = mf
3401 else:
3401 else:
3402 pmf = None
3402 pmf = None
3403
3403
3404 # need all matching names in dirstate and manifest of target rev,
3404 # need all matching names in dirstate and manifest of target rev,
3405 # so have to walk both. do not print errors if files exist in one
3405 # so have to walk both. do not print errors if files exist in one
3406 # but not other.
3406 # but not other.
3407
3407
3408 names = {}
3408 names = {}
3409
3409
3410 wlock = repo.wlock()
3410 wlock = repo.wlock()
3411 try:
3411 try:
3412 # walk dirstate.
3412 # walk dirstate.
3413
3413
3414 m = cmdutil.match(repo, pats, opts)
3414 m = cmdutil.match(repo, pats, opts)
3415 m.bad = lambda x, y: False
3415 m.bad = lambda x, y: False
3416 for abs in repo.walk(m):
3416 for abs in repo.walk(m):
3417 names[abs] = m.rel(abs), m.exact(abs)
3417 names[abs] = m.rel(abs), m.exact(abs)
3418
3418
3419 # walk target manifest.
3419 # walk target manifest.
3420
3420
3421 def badfn(path, msg):
3421 def badfn(path, msg):
3422 if path in names:
3422 if path in names:
3423 return
3423 return
3424 path_ = path + '/'
3424 path_ = path + '/'
3425 for f in names:
3425 for f in names:
3426 if f.startswith(path_):
3426 if f.startswith(path_):
3427 return
3427 return
3428 ui.warn("%s: %s\n" % (m.rel(path), msg))
3428 ui.warn("%s: %s\n" % (m.rel(path), msg))
3429
3429
3430 m = cmdutil.match(repo, pats, opts)
3430 m = cmdutil.match(repo, pats, opts)
3431 m.bad = badfn
3431 m.bad = badfn
3432 for abs in repo[node].walk(m):
3432 for abs in repo[node].walk(m):
3433 if abs not in names:
3433 if abs not in names:
3434 names[abs] = m.rel(abs), m.exact(abs)
3434 names[abs] = m.rel(abs), m.exact(abs)
3435
3435
3436 m = cmdutil.matchfiles(repo, names)
3436 m = cmdutil.matchfiles(repo, names)
3437 changes = repo.status(match=m)[:4]
3437 changes = repo.status(match=m)[:4]
3438 modified, added, removed, deleted = map(set, changes)
3438 modified, added, removed, deleted = map(set, changes)
3439
3439
3440 # if f is a rename, also revert the source
3440 # if f is a rename, also revert the source
3441 cwd = repo.getcwd()
3441 cwd = repo.getcwd()
3442 for f in added:
3442 for f in added:
3443 src = repo.dirstate.copied(f)
3443 src = repo.dirstate.copied(f)
3444 if src and src not in names and repo.dirstate[src] == 'r':
3444 if src and src not in names and repo.dirstate[src] == 'r':
3445 removed.add(src)
3445 removed.add(src)
3446 names[src] = (repo.pathto(src, cwd), True)
3446 names[src] = (repo.pathto(src, cwd), True)
3447
3447
3448 def removeforget(abs):
3448 def removeforget(abs):
3449 if repo.dirstate[abs] == 'a':
3449 if repo.dirstate[abs] == 'a':
3450 return _('forgetting %s\n')
3450 return _('forgetting %s\n')
3451 return _('removing %s\n')
3451 return _('removing %s\n')
3452
3452
3453 revert = ([], _('reverting %s\n'))
3453 revert = ([], _('reverting %s\n'))
3454 add = ([], _('adding %s\n'))
3454 add = ([], _('adding %s\n'))
3455 remove = ([], removeforget)
3455 remove = ([], removeforget)
3456 undelete = ([], _('undeleting %s\n'))
3456 undelete = ([], _('undeleting %s\n'))
3457
3457
3458 disptable = (
3458 disptable = (
3459 # dispatch table:
3459 # dispatch table:
3460 # file state
3460 # file state
3461 # action if in target manifest
3461 # action if in target manifest
3462 # action if not in target manifest
3462 # action if not in target manifest
3463 # make backup if in target manifest
3463 # make backup if in target manifest
3464 # make backup if not in target manifest
3464 # make backup if not in target manifest
3465 (modified, revert, remove, True, True),
3465 (modified, revert, remove, True, True),
3466 (added, revert, remove, True, False),
3466 (added, revert, remove, True, False),
3467 (removed, undelete, None, False, False),
3467 (removed, undelete, None, False, False),
3468 (deleted, revert, remove, False, False),
3468 (deleted, revert, remove, False, False),
3469 )
3469 )
3470
3470
3471 for abs, (rel, exact) in sorted(names.items()):
3471 for abs, (rel, exact) in sorted(names.items()):
3472 mfentry = mf.get(abs)
3472 mfentry = mf.get(abs)
3473 target = repo.wjoin(abs)
3473 target = repo.wjoin(abs)
3474 def handle(xlist, dobackup):
3474 def handle(xlist, dobackup):
3475 xlist[0].append(abs)
3475 xlist[0].append(abs)
3476 if (dobackup and not opts.get('no_backup') and
3476 if (dobackup and not opts.get('no_backup') and
3477 os.path.lexists(target)):
3477 os.path.lexists(target)):
3478 bakname = "%s.orig" % rel
3478 bakname = "%s.orig" % rel
3479 ui.note(_('saving current version of %s as %s\n') %
3479 ui.note(_('saving current version of %s as %s\n') %
3480 (rel, bakname))
3480 (rel, bakname))
3481 if not opts.get('dry_run'):
3481 if not opts.get('dry_run'):
3482 util.rename(target, bakname)
3482 util.rename(target, bakname)
3483 if ui.verbose or not exact:
3483 if ui.verbose or not exact:
3484 msg = xlist[1]
3484 msg = xlist[1]
3485 if not isinstance(msg, basestring):
3485 if not isinstance(msg, basestring):
3486 msg = msg(abs)
3486 msg = msg(abs)
3487 ui.status(msg % rel)
3487 ui.status(msg % rel)
3488 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3488 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3489 if abs not in table:
3489 if abs not in table:
3490 continue
3490 continue
3491 # file has changed in dirstate
3491 # file has changed in dirstate
3492 if mfentry:
3492 if mfentry:
3493 handle(hitlist, backuphit)
3493 handle(hitlist, backuphit)
3494 elif misslist is not None:
3494 elif misslist is not None:
3495 handle(misslist, backupmiss)
3495 handle(misslist, backupmiss)
3496 break
3496 break
3497 else:
3497 else:
3498 if abs not in repo.dirstate:
3498 if abs not in repo.dirstate:
3499 if mfentry:
3499 if mfentry:
3500 handle(add, True)
3500 handle(add, True)
3501 elif exact:
3501 elif exact:
3502 ui.warn(_('file not managed: %s\n') % rel)
3502 ui.warn(_('file not managed: %s\n') % rel)
3503 continue
3503 continue
3504 # file has not changed in dirstate
3504 # file has not changed in dirstate
3505 if node == parent:
3505 if node == parent:
3506 if exact:
3506 if exact:
3507 ui.warn(_('no changes needed to %s\n') % rel)
3507 ui.warn(_('no changes needed to %s\n') % rel)
3508 continue
3508 continue
3509 if pmf is None:
3509 if pmf is None:
3510 # only need parent manifest in this unlikely case,
3510 # only need parent manifest in this unlikely case,
3511 # so do not read by default
3511 # so do not read by default
3512 pmf = repo[parent].manifest()
3512 pmf = repo[parent].manifest()
3513 if abs in pmf:
3513 if abs in pmf:
3514 if mfentry:
3514 if mfentry:
3515 # if version of file is same in parent and target
3515 # if version of file is same in parent and target
3516 # manifests, do nothing
3516 # manifests, do nothing
3517 if (pmf[abs] != mfentry or
3517 if (pmf[abs] != mfentry or
3518 pmf.flags(abs) != mf.flags(abs)):
3518 pmf.flags(abs) != mf.flags(abs)):
3519 handle(revert, False)
3519 handle(revert, False)
3520 else:
3520 else:
3521 handle(remove, False)
3521 handle(remove, False)
3522
3522
3523 if not opts.get('dry_run'):
3523 if not opts.get('dry_run'):
3524 def checkout(f):
3524 def checkout(f):
3525 fc = ctx[f]
3525 fc = ctx[f]
3526 repo.wwrite(f, fc.data(), fc.flags())
3526 repo.wwrite(f, fc.data(), fc.flags())
3527
3527
3528 audit_path = scmutil.path_auditor(repo.root)
3528 audit_path = scmutil.path_auditor(repo.root)
3529 for f in remove[0]:
3529 for f in remove[0]:
3530 if repo.dirstate[f] == 'a':
3530 if repo.dirstate[f] == 'a':
3531 repo.dirstate.forget(f)
3531 repo.dirstate.forget(f)
3532 continue
3532 continue
3533 audit_path(f)
3533 audit_path(f)
3534 try:
3534 try:
3535 util.unlinkpath(repo.wjoin(f))
3535 util.unlinkpath(repo.wjoin(f))
3536 except OSError:
3536 except OSError:
3537 pass
3537 pass
3538 repo.dirstate.remove(f)
3538 repo.dirstate.remove(f)
3539
3539
3540 normal = None
3540 normal = None
3541 if node == parent:
3541 if node == parent:
3542 # We're reverting to our parent. If possible, we'd like status
3542 # We're reverting to our parent. If possible, we'd like status
3543 # to report the file as clean. We have to use normallookup for
3543 # to report the file as clean. We have to use normallookup for
3544 # merges to avoid losing information about merged/dirty files.
3544 # merges to avoid losing information about merged/dirty files.
3545 if p2 != nullid:
3545 if p2 != nullid:
3546 normal = repo.dirstate.normallookup
3546 normal = repo.dirstate.normallookup
3547 else:
3547 else:
3548 normal = repo.dirstate.normal
3548 normal = repo.dirstate.normal
3549 for f in revert[0]:
3549 for f in revert[0]:
3550 checkout(f)
3550 checkout(f)
3551 if normal:
3551 if normal:
3552 normal(f)
3552 normal(f)
3553
3553
3554 for f in add[0]:
3554 for f in add[0]:
3555 checkout(f)
3555 checkout(f)
3556 repo.dirstate.add(f)
3556 repo.dirstate.add(f)
3557
3557
3558 normal = repo.dirstate.normallookup
3558 normal = repo.dirstate.normallookup
3559 if node == parent and p2 == nullid:
3559 if node == parent and p2 == nullid:
3560 normal = repo.dirstate.normal
3560 normal = repo.dirstate.normal
3561 for f in undelete[0]:
3561 for f in undelete[0]:
3562 checkout(f)
3562 checkout(f)
3563 normal(f)
3563 normal(f)
3564
3564
3565 finally:
3565 finally:
3566 wlock.release()
3566 wlock.release()
3567
3567
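# Editor's note (compact sketch, not the actual revert code): the dispatch
# table pattern used above pairs a set of file states with the action to take
# when the file is, or is not, present in the target manifest, plus backup
# flags.  The sets and file names below are plain-Python stand-ins.

modified, added, removed, deleted = {'m.txt'}, {'a.txt'}, {'r.txt'}, {'d.txt'}
manifest = {'m.txt', 'r.txt', 'd.txt'}          # files in the target revision

actions = []
disptable = (
    # (state set, action if in manifest, action if not, backup-hit, backup-miss)
    (modified, 'revert',   'remove', True,  True),
    (added,    'revert',   'remove', True,  False),
    (removed,  'undelete', None,     False, False),
    (deleted,  'revert',   'remove', False, False),
)

for f in sorted(modified | added | removed | deleted):
    for table, hit, miss, backuphit, backupmiss in disptable:
        if f not in table:
            continue
        if f in manifest:
            actions.append((f, hit, backuphit))
        elif miss is not None:
            actions.append((f, miss, backupmiss))
        break

assert ('a.txt', 'remove', False) in actions   # added, not in target: forget it
assert ('m.txt', 'revert', True) in actions    # modified: revert with .orig backup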
3568 def rollback(ui, repo, **opts):
3568 def rollback(ui, repo, **opts):
3569 """roll back the last transaction (dangerous)
3569 """roll back the last transaction (dangerous)
3570
3570
3571 This command should be used with care. There is only one level of
3571 This command should be used with care. There is only one level of
3572 rollback, and there is no way to undo a rollback. It will also
3572 rollback, and there is no way to undo a rollback. It will also
3573 restore the dirstate at the time of the last transaction, losing
3573 restore the dirstate at the time of the last transaction, losing
3574 any dirstate changes since that time. This command does not alter
3574 any dirstate changes since that time. This command does not alter
3575 the working directory.
3575 the working directory.
3576
3576
3577 Transactions are used to encapsulate the effects of all commands
3577 Transactions are used to encapsulate the effects of all commands
3578 that create new changesets or propagate existing changesets into a
3578 that create new changesets or propagate existing changesets into a
3579 repository. For example, the following commands are transactional,
3579 repository. For example, the following commands are transactional,
3580 and their effects can be rolled back:
3580 and their effects can be rolled back:
3581
3581
3582 - commit
3582 - commit
3583 - import
3583 - import
3584 - pull
3584 - pull
3585 - push (with this repository as the destination)
3585 - push (with this repository as the destination)
3586 - unbundle
3586 - unbundle
3587
3587
3588 This command is not intended for use on public repositories. Once
3588 This command is not intended for use on public repositories. Once
3589 changes are visible for pull by other users, rolling a transaction
3589 changes are visible for pull by other users, rolling a transaction
3590 back locally is ineffective (someone else may already have pulled
3590 back locally is ineffective (someone else may already have pulled
3591 the changes). Furthermore, a race is possible with readers of the
3591 the changes). Furthermore, a race is possible with readers of the
3592 repository; for example an in-progress pull from the repository
3592 repository; for example an in-progress pull from the repository
3593 may fail if a rollback is performed.
3593 may fail if a rollback is performed.
3594
3594
3595 Returns 0 on success, 1 if no rollback data is available.
3595 Returns 0 on success, 1 if no rollback data is available.
3596 """
3596 """
3597 return repo.rollback(opts.get('dry_run'))
3597 return repo.rollback(opts.get('dry_run'))
3598
3598
3599 def root(ui, repo):
3599 def root(ui, repo):
3600 """print the root (top) of the current working directory
3600 """print the root (top) of the current working directory
3601
3601
3602 Print the root directory of the current repository.
3602 Print the root directory of the current repository.
3603
3603
3604 Returns 0 on success.
3604 Returns 0 on success.
3605 """
3605 """
3606 ui.write(repo.root + "\n")
3606 ui.write(repo.root + "\n")
3607
3607
3608 def serve(ui, repo, **opts):
3608 def serve(ui, repo, **opts):
3609 """start stand-alone webserver
3609 """start stand-alone webserver
3610
3610
3611 Start a local HTTP repository browser and pull server. You can use
3611 Start a local HTTP repository browser and pull server. You can use
3612 this for ad-hoc sharing and browsing of repositories. It is
3612 this for ad-hoc sharing and browsing of repositories. It is
3613 recommended to use a real web server to serve a repository for
3613 recommended to use a real web server to serve a repository for
3614 longer periods of time.
3614 longer periods of time.
3615
3615
3616 Please note that the server does not implement access control.
3616 Please note that the server does not implement access control.
3617 This means that, by default, anybody can read from the server and
3617 This means that, by default, anybody can read from the server and
3618 nobody can write to it. Set the ``web.allow_push``
3618 nobody can write to it. Set the ``web.allow_push``
3619 option to ``*`` to allow everybody to push to the server. You
3619 option to ``*`` to allow everybody to push to the server. You
3620 should use a real web server if you need to authenticate users.
3620 should use a real web server if you need to authenticate users.
3621
3621
3622 By default, the server logs accesses to stdout and errors to
3622 By default, the server logs accesses to stdout and errors to
3623 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3623 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3624 files.
3624 files.
3625
3625
3626 To have the server choose a free port number to listen on, specify
3626 To have the server choose a free port number to listen on, specify
3627 a port number of 0; in this case, the server will print the port
3627 a port number of 0; in this case, the server will print the port
3628 number it uses.
3628 number it uses.
3629
3629
3630 Returns 0 on success.
3630 Returns 0 on success.
3631 """
3631 """
3632
3632
3633 if opts["stdio"]:
3633 if opts["stdio"]:
3634 if repo is None:
3634 if repo is None:
3635 raise error.RepoError(_("There is no Mercurial repository here"
3635 raise error.RepoError(_("There is no Mercurial repository here"
3636 " (.hg not found)"))
3636 " (.hg not found)"))
3637 s = sshserver.sshserver(ui, repo)
3637 s = sshserver.sshserver(ui, repo)
3638 s.serve_forever()
3638 s.serve_forever()
3639
3639
3640 # this way we can check if something was given in the command-line
3640 # this way we can check if something was given in the command-line
3641 if opts.get('port'):
3641 if opts.get('port'):
3642 opts['port'] = util.getport(opts.get('port'))
3642 opts['port'] = util.getport(opts.get('port'))
3643
3643
3644 baseui = repo and repo.baseui or ui
3644 baseui = repo and repo.baseui or ui
3645 optlist = ("name templates style address port prefix ipv6"
3645 optlist = ("name templates style address port prefix ipv6"
3646 " accesslog errorlog certificate encoding")
3646 " accesslog errorlog certificate encoding")
3647 for o in optlist.split():
3647 for o in optlist.split():
3648 val = opts.get(o, '')
3648 val = opts.get(o, '')
3649 if val in (None, ''): # should check against default options instead
3649 if val in (None, ''): # should check against default options instead
3650 continue
3650 continue
3651 baseui.setconfig("web", o, val)
3651 baseui.setconfig("web", o, val)
3652 if repo and repo.ui != baseui:
3652 if repo and repo.ui != baseui:
3653 repo.ui.setconfig("web", o, val)
3653 repo.ui.setconfig("web", o, val)
3654
3654
3655 o = opts.get('web_conf') or opts.get('webdir_conf')
3655 o = opts.get('web_conf') or opts.get('webdir_conf')
3656 if not o:
3656 if not o:
3657 if not repo:
3657 if not repo:
3658 raise error.RepoError(_("There is no Mercurial repository"
3658 raise error.RepoError(_("There is no Mercurial repository"
3659 " here (.hg not found)"))
3659 " here (.hg not found)"))
3660 o = repo.root
3660 o = repo.root
3661
3661
3662 app = hgweb.hgweb(o, baseui=ui)
3662 app = hgweb.hgweb(o, baseui=ui)
3663
3663
3664 class service(object):
3664 class service(object):
3665 def init(self):
3665 def init(self):
3666 util.set_signal_handler()
3666 util.set_signal_handler()
3667 self.httpd = hgweb.server.create_server(ui, app)
3667 self.httpd = hgweb.server.create_server(ui, app)
3668
3668
3669 if opts['port'] and not ui.verbose:
3669 if opts['port'] and not ui.verbose:
3670 return
3670 return
3671
3671
3672 if self.httpd.prefix:
3672 if self.httpd.prefix:
3673 prefix = self.httpd.prefix.strip('/') + '/'
3673 prefix = self.httpd.prefix.strip('/') + '/'
3674 else:
3674 else:
3675 prefix = ''
3675 prefix = ''
3676
3676
3677 port = ':%d' % self.httpd.port
3677 port = ':%d' % self.httpd.port
3678 if port == ':80':
3678 if port == ':80':
3679 port = ''
3679 port = ''
3680
3680
3681 bindaddr = self.httpd.addr
3681 bindaddr = self.httpd.addr
3682 if bindaddr == '0.0.0.0':
3682 if bindaddr == '0.0.0.0':
3683 bindaddr = '*'
3683 bindaddr = '*'
3684 elif ':' in bindaddr: # IPv6
3684 elif ':' in bindaddr: # IPv6
3685 bindaddr = '[%s]' % bindaddr
3685 bindaddr = '[%s]' % bindaddr
3686
3686
3687 fqaddr = self.httpd.fqaddr
3687 fqaddr = self.httpd.fqaddr
3688 if ':' in fqaddr:
3688 if ':' in fqaddr:
3689 fqaddr = '[%s]' % fqaddr
3689 fqaddr = '[%s]' % fqaddr
3690 if opts['port']:
3690 if opts['port']:
3691 write = ui.status
3691 write = ui.status
3692 else:
3692 else:
3693 write = ui.write
3693 write = ui.write
3694 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3694 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3695 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3695 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3696
3696
3697 def run(self):
3697 def run(self):
3698 self.httpd.serve_forever()
3698 self.httpd.serve_forever()
3699
3699
3700 service = service()
3700 service = service()
3701
3701
3702 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3702 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3703
3703
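# Editor's note (minimal standalone demonstration, no Mercurial involved): the
# "port 0 means pick a free port" behaviour mentioned in the serve docstring
# is plain BSD-socket semantics -- binding to port 0 lets the OS assign any
# free port, which the server then reports.

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('127.0.0.1', 0))                 # 0 asks the OS for any free port
print('bound to port %d' % s.getsockname()[1])
s.close()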
3704 def status(ui, repo, *pats, **opts):
3704 def status(ui, repo, *pats, **opts):
3705 """show changed files in the working directory
3705 """show changed files in the working directory
3706
3706
3707 Show status of files in the repository. If names are given, only
3707 Show status of files in the repository. If names are given, only
3708 files that match are shown. Files that are clean or ignored or
3708 files that match are shown. Files that are clean or ignored or
3709 the source of a copy/move operation, are not listed unless
3709 the source of a copy/move operation, are not listed unless
3710 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3710 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3711 Unless options described with "show only ..." are given, the
3711 Unless options described with "show only ..." are given, the
3712 options -mardu are used.
3712 options -mardu are used.
3713
3713
3714 Option -q/--quiet hides untracked (unknown and ignored) files
3714 Option -q/--quiet hides untracked (unknown and ignored) files
3715 unless explicitly requested with -u/--unknown or -i/--ignored.
3715 unless explicitly requested with -u/--unknown or -i/--ignored.
3716
3716
3717 .. note::
3717 .. note::
3718 status may appear to disagree with diff if permissions have
3718 status may appear to disagree with diff if permissions have
3719 changed or a merge has occurred. The standard diff format does
3719 changed or a merge has occurred. The standard diff format does
3720 not report permission changes and diff only reports changes
3720 not report permission changes and diff only reports changes
3721 relative to one merge parent.
3721 relative to one merge parent.
3722
3722
3723 If one revision is given, it is used as the base revision.
3723 If one revision is given, it is used as the base revision.
3724 If two revisions are given, the differences between them are
3724 If two revisions are given, the differences between them are
3725 shown. The --change option can also be used as a shortcut to list
3725 shown. The --change option can also be used as a shortcut to list
3726 the changed files of a revision from its first parent.
3726 the changed files of a revision from its first parent.
3727
3727
3728 The codes used to show the status of files are::
3728 The codes used to show the status of files are::
3729
3729
3730 M = modified
3730 M = modified
3731 A = added
3731 A = added
3732 R = removed
3732 R = removed
3733 C = clean
3733 C = clean
3734 ! = missing (deleted by non-hg command, but still tracked)
3734 ! = missing (deleted by non-hg command, but still tracked)
3735 ? = not tracked
3735 ? = not tracked
3736 I = ignored
3736 I = ignored
3737 = origin of the previous file listed as A (added)
3737 = origin of the previous file listed as A (added)
3738
3738
3739 Returns 0 on success.
3739 Returns 0 on success.
3740 """
3740 """
3741
3741
3742 revs = opts.get('rev')
3742 revs = opts.get('rev')
3743 change = opts.get('change')
3743 change = opts.get('change')
3744
3744
3745 if revs and change:
3745 if revs and change:
3746 msg = _('cannot specify --rev and --change at the same time')
3746 msg = _('cannot specify --rev and --change at the same time')
3747 raise util.Abort(msg)
3747 raise util.Abort(msg)
3748 elif change:
3748 elif change:
3749 node2 = repo.lookup(change)
3749 node2 = repo.lookup(change)
3750 node1 = repo[node2].p1().node()
3750 node1 = repo[node2].p1().node()
3751 else:
3751 else:
3752 node1, node2 = cmdutil.revpair(repo, revs)
3752 node1, node2 = cmdutil.revpair(repo, revs)
3753
3753
3754 cwd = (pats and repo.getcwd()) or ''
3754 cwd = (pats and repo.getcwd()) or ''
3755 end = opts.get('print0') and '\0' or '\n'
3755 end = opts.get('print0') and '\0' or '\n'
3756 copy = {}
3756 copy = {}
3757 states = 'modified added removed deleted unknown ignored clean'.split()
3757 states = 'modified added removed deleted unknown ignored clean'.split()
3758 show = [k for k in states if opts.get(k)]
3758 show = [k for k in states if opts.get(k)]
3759 if opts.get('all'):
3759 if opts.get('all'):
3760 show += ui.quiet and (states[:4] + ['clean']) or states
3760 show += ui.quiet and (states[:4] + ['clean']) or states
3761 if not show:
3761 if not show:
3762 show = ui.quiet and states[:4] or states[:5]
3762 show = ui.quiet and states[:4] or states[:5]
3763
3763
3764 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3764 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3765 'ignored' in show, 'clean' in show, 'unknown' in show,
3765 'ignored' in show, 'clean' in show, 'unknown' in show,
3766 opts.get('subrepos'))
3766 opts.get('subrepos'))
3767 changestates = zip(states, 'MAR!?IC', stat)
3767 changestates = zip(states, 'MAR!?IC', stat)
3768
3768
3769 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3769 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3770 ctxn = repo[nullid]
3770 ctxn = repo[nullid]
3771 ctx1 = repo[node1]
3771 ctx1 = repo[node1]
3772 ctx2 = repo[node2]
3772 ctx2 = repo[node2]
3773 added = stat[1]
3773 added = stat[1]
3774 if node2 is None:
3774 if node2 is None:
3775 added = stat[0] + stat[1] # merged?
3775 added = stat[0] + stat[1] # merged?
3776
3776
3777 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3777 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3778 if k in added:
3778 if k in added:
3779 copy[k] = v
3779 copy[k] = v
3780 elif v in added:
3780 elif v in added:
3781 copy[v] = k
3781 copy[v] = k
3782
3782
3783 for state, char, files in changestates:
3783 for state, char, files in changestates:
3784 if state in show:
3784 if state in show:
3785 format = "%s %%s%s" % (char, end)
3785 format = "%s %%s%s" % (char, end)
3786 if opts.get('no_status'):
3786 if opts.get('no_status'):
3787 format = "%%s%s" % end
3787 format = "%%s%s" % end
3788
3788
3789 for f in files:
3789 for f in files:
3790 ui.write(format % repo.pathto(f, cwd),
3790 ui.write(format % repo.pathto(f, cwd),
3791 label='status.' + state)
3791 label='status.' + state)
3792 if f in copy:
3792 if f in copy:
3793 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3793 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3794 label='status.copied')
3794 label='status.copied')
3795
3795
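# Editor's note (illustrative only): a tiny sketch of how the per-file output
# format above is assembled -- the status letter is baked into a template
# whose remaining "%s" takes the path, and --print0/--no-status merely swap
# the terminator or drop the letter.  The helper name is made up.

def status_line(char, path, no_status=False, print0=False):
    end = '\0' if print0 else '\n'
    fmt = ('%%s%s' % end) if no_status else ('%s %%s%s' % (char, end))
    return fmt % path

assert status_line('M', 'foo.py') == 'M foo.py\n'
assert status_line('M', 'foo.py', no_status=True) == 'foo.py\n'
assert status_line('A', 'bar.py', print0=True) == 'A bar.py\0'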
3796 def summary(ui, repo, **opts):
3796 def summary(ui, repo, **opts):
3797 """summarize working directory state
3797 """summarize working directory state
3798
3798
3799 This generates a brief summary of the working directory state,
3799 This generates a brief summary of the working directory state,
3800 including parents, branch, commit status, and available updates.
3800 including parents, branch, commit status, and available updates.
3801
3801
3802 With the --remote option, this will check the default paths for
3802 With the --remote option, this will check the default paths for
3803 incoming and outgoing changes. This can be time-consuming.
3803 incoming and outgoing changes. This can be time-consuming.
3804
3804
3805 Returns 0 on success.
3805 Returns 0 on success.
3806 """
3806 """
3807
3807
3808 ctx = repo[None]
3808 ctx = repo[None]
3809 parents = ctx.parents()
3809 parents = ctx.parents()
3810 pnode = parents[0].node()
3810 pnode = parents[0].node()
3811
3811
3812 for p in parents:
3812 for p in parents:
3813 # label with log.changeset (instead of log.parent) since this
3813 # label with log.changeset (instead of log.parent) since this
3814 # shows a working directory parent *changeset*:
3814 # shows a working directory parent *changeset*:
3815 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3815 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3816 label='log.changeset')
3816 label='log.changeset')
3817 ui.write(' '.join(p.tags()), label='log.tag')
3817 ui.write(' '.join(p.tags()), label='log.tag')
3818 if p.bookmarks():
3818 if p.bookmarks():
3819 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3819 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3820 if p.rev() == -1:
3820 if p.rev() == -1:
3821 if not len(repo):
3821 if not len(repo):
3822 ui.write(_(' (empty repository)'))
3822 ui.write(_(' (empty repository)'))
3823 else:
3823 else:
3824 ui.write(_(' (no revision checked out)'))
3824 ui.write(_(' (no revision checked out)'))
3825 ui.write('\n')
3825 ui.write('\n')
3826 if p.description():
3826 if p.description():
3827 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3827 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3828 label='log.summary')
3828 label='log.summary')
3829
3829
3830 branch = ctx.branch()
3830 branch = ctx.branch()
3831 bheads = repo.branchheads(branch)
3831 bheads = repo.branchheads(branch)
3832 m = _('branch: %s\n') % branch
3832 m = _('branch: %s\n') % branch
3833 if branch != 'default':
3833 if branch != 'default':
3834 ui.write(m, label='log.branch')
3834 ui.write(m, label='log.branch')
3835 else:
3835 else:
3836 ui.status(m, label='log.branch')
3836 ui.status(m, label='log.branch')
3837
3837
3838 st = list(repo.status(unknown=True))[:6]
3838 st = list(repo.status(unknown=True))[:6]
3839
3839
3840 c = repo.dirstate.copies()
3840 c = repo.dirstate.copies()
3841 copied, renamed = [], []
3841 copied, renamed = [], []
3842 for d, s in c.iteritems():
3842 for d, s in c.iteritems():
3843 if s in st[2]:
3843 if s in st[2]:
3844 st[2].remove(s)
3844 st[2].remove(s)
3845 renamed.append(d)
3845 renamed.append(d)
3846 else:
3846 else:
3847 copied.append(d)
3847 copied.append(d)
3848 if d in st[1]:
3848 if d in st[1]:
3849 st[1].remove(d)
3849 st[1].remove(d)
3850 st.insert(3, renamed)
3850 st.insert(3, renamed)
3851 st.insert(4, copied)
3851 st.insert(4, copied)
3852
3852
3853 ms = mergemod.mergestate(repo)
3853 ms = mergemod.mergestate(repo)
3854 st.append([f for f in ms if ms[f] == 'u'])
3854 st.append([f for f in ms if ms[f] == 'u'])
3855
3855
3856 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3856 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3857 st.append(subs)
3857 st.append(subs)
3858
3858
3859 labels = [ui.label(_('%d modified'), 'status.modified'),
3859 labels = [ui.label(_('%d modified'), 'status.modified'),
3860 ui.label(_('%d added'), 'status.added'),
3860 ui.label(_('%d added'), 'status.added'),
3861 ui.label(_('%d removed'), 'status.removed'),
3861 ui.label(_('%d removed'), 'status.removed'),
3862 ui.label(_('%d renamed'), 'status.copied'),
3862 ui.label(_('%d renamed'), 'status.copied'),
3863 ui.label(_('%d copied'), 'status.copied'),
3863 ui.label(_('%d copied'), 'status.copied'),
3864 ui.label(_('%d deleted'), 'status.deleted'),
3864 ui.label(_('%d deleted'), 'status.deleted'),
3865 ui.label(_('%d unknown'), 'status.unknown'),
3865 ui.label(_('%d unknown'), 'status.unknown'),
3866 ui.label(_('%d ignored'), 'status.ignored'),
3866 ui.label(_('%d ignored'), 'status.ignored'),
3867 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3867 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3868 ui.label(_('%d subrepos'), 'status.modified')]
3868 ui.label(_('%d subrepos'), 'status.modified')]
3869 t = []
3869 t = []
3870 for s, l in zip(st, labels):
3870 for s, l in zip(st, labels):
3871 if s:
3871 if s:
3872 t.append(l % len(s))
3872 t.append(l % len(s))
3873
3873
3874 t = ', '.join(t)
3874 t = ', '.join(t)
3875 cleanworkdir = False
3875 cleanworkdir = False
3876
3876
3877 if len(parents) > 1:
3877 if len(parents) > 1:
3878 t += _(' (merge)')
3878 t += _(' (merge)')
3879 elif branch != parents[0].branch():
3879 elif branch != parents[0].branch():
3880 t += _(' (new branch)')
3880 t += _(' (new branch)')
3881 elif (parents[0].extra().get('close') and
3881 elif (parents[0].extra().get('close') and
3882 pnode in repo.branchheads(branch, closed=True)):
3882 pnode in repo.branchheads(branch, closed=True)):
3883 t += _(' (head closed)')
3883 t += _(' (head closed)')
3884 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3884 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3885 t += _(' (clean)')
3885 t += _(' (clean)')
3886 cleanworkdir = True
3886 cleanworkdir = True
3887 elif pnode not in bheads:
3887 elif pnode not in bheads:
3888 t += _(' (new branch head)')
3888 t += _(' (new branch head)')
3889
3889
3890 if cleanworkdir:
3890 if cleanworkdir:
3891 ui.status(_('commit: %s\n') % t.strip())
3891 ui.status(_('commit: %s\n') % t.strip())
3892 else:
3892 else:
3893 ui.write(_('commit: %s\n') % t.strip())
3893 ui.write(_('commit: %s\n') % t.strip())
3894
3894
3895 # all ancestors of branch heads - all ancestors of parent = new csets
3895 # all ancestors of branch heads - all ancestors of parent = new csets
3896 new = [0] * len(repo)
3896 new = [0] * len(repo)
3897 cl = repo.changelog
3897 cl = repo.changelog
3898 for a in [cl.rev(n) for n in bheads]:
3898 for a in [cl.rev(n) for n in bheads]:
3899 new[a] = 1
3899 new[a] = 1
3900 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3900 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3901 new[a] = 1
3901 new[a] = 1
3902 for a in [p.rev() for p in parents]:
3902 for a in [p.rev() for p in parents]:
3903 if a >= 0:
3903 if a >= 0:
3904 new[a] = 0
3904 new[a] = 0
3905 for a in cl.ancestors(*[p.rev() for p in parents]):
3905 for a in cl.ancestors(*[p.rev() for p in parents]):
3906 new[a] = 0
3906 new[a] = 0
3907 new = sum(new)
3907 new = sum(new)
3908
3908
3909 if new == 0:
3909 if new == 0:
3910 ui.status(_('update: (current)\n'))
3910 ui.status(_('update: (current)\n'))
3911 elif pnode not in bheads:
3911 elif pnode not in bheads:
3912 ui.write(_('update: %d new changesets (update)\n') % new)
3912 ui.write(_('update: %d new changesets (update)\n') % new)
3913 else:
3913 else:
3914 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3914 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3915 (new, len(bheads)))
3915 (new, len(bheads)))
3916
3916
3917 if opts.get('remote'):
3917 if opts.get('remote'):
3918 t = []
3918 t = []
3919 source, branches = hg.parseurl(ui.expandpath('default'))
3919 source, branches = hg.parseurl(ui.expandpath('default'))
3920 other = hg.repository(hg.remoteui(repo, {}), source)
3920 other = hg.repository(hg.remoteui(repo, {}), source)
3921 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3921 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3922 ui.debug('comparing with %s\n' % url.hidepassword(source))
3922 ui.debug('comparing with %s\n' % util.hidepassword(source))
3923 repo.ui.pushbuffer()
3923 repo.ui.pushbuffer()
3924 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3924 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3925 repo.ui.popbuffer()
3925 repo.ui.popbuffer()
3926 if incoming:
3926 if incoming:
3927 t.append(_('1 or more incoming'))
3927 t.append(_('1 or more incoming'))
3928
3928
3929 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3929 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3930 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3930 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3931 other = hg.repository(hg.remoteui(repo, {}), dest)
3931 other = hg.repository(hg.remoteui(repo, {}), dest)
3932 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3932 ui.debug('comparing with %s\n' % util.hidepassword(dest))
3933 repo.ui.pushbuffer()
3933 repo.ui.pushbuffer()
3934 common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
3934 common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
3935 repo.ui.popbuffer()
3935 repo.ui.popbuffer()
3936 o = repo.changelog.findmissing(common=common)
3936 o = repo.changelog.findmissing(common=common)
3937 if o:
3937 if o:
3938 t.append(_('%d outgoing') % len(o))
3938 t.append(_('%d outgoing') % len(o))
3939 if 'bookmarks' in other.listkeys('namespaces'):
3939 if 'bookmarks' in other.listkeys('namespaces'):
3940 lmarks = repo.listkeys('bookmarks')
3940 lmarks = repo.listkeys('bookmarks')
3941 rmarks = other.listkeys('bookmarks')
3941 rmarks = other.listkeys('bookmarks')
3942 diff = set(rmarks) - set(lmarks)
3942 diff = set(rmarks) - set(lmarks)
3943 if len(diff) > 0:
3943 if len(diff) > 0:
3944 t.append(_('%d incoming bookmarks') % len(diff))
3944 t.append(_('%d incoming bookmarks') % len(diff))
3945 diff = set(lmarks) - set(rmarks)
3945 diff = set(lmarks) - set(rmarks)
3946 if len(diff) > 0:
3946 if len(diff) > 0:
3947 t.append(_('%d outgoing bookmarks') % len(diff))
3947 t.append(_('%d outgoing bookmarks') % len(diff))
3948
3948
3949 if t:
3949 if t:
3950 ui.write(_('remote: %s\n') % (', '.join(t)))
3950 ui.write(_('remote: %s\n') % (', '.join(t)))
3951 else:
3951 else:
3952 ui.status(_('remote: (synced)\n'))
3952 ui.status(_('remote: (synced)\n'))
3953
3953
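# Editor's note (self-contained sketch on a toy history): the "update: N new
# changesets" figure above is computed as |ancestors(branch heads) -
# ancestors(working parents)| using a 0/1 flag per revision.  The ancestors()
# helper below includes the revision itself for brevity, unlike
# changelog.ancestors(), which is why the real code flags heads and parents
# explicitly.

def ancestors(parents_of, rev):
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r in seen or r < 0:
            continue
        seen.add(r)
        stack.extend(parents_of[r])
    return seen

parents_of = {0: [], 1: [0], 2: [1], 3: [2], 4: [3]}   # toy linear history
bheads, wdparents = [4], [2]              # head at rev 4, checked out at rev 2

new = [0] * len(parents_of)
for h in bheads:
    for a in ancestors(parents_of, h):
        new[a] = 1
for p in wdparents:
    for a in ancestors(parents_of, p):
        new[a] = 0
print('%d new changesets' % sum(new))     # -> 2 (revisions 3 and 4)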
3954 def tag(ui, repo, name1, *names, **opts):
3954 def tag(ui, repo, name1, *names, **opts):
3955 """add one or more tags for the current or given revision
3955 """add one or more tags for the current or given revision
3956
3956
3957 Name a particular revision using <name>.
3957 Name a particular revision using <name>.
3958
3958
3959 Tags are used to name particular revisions of the repository and are
3959 Tags are used to name particular revisions of the repository and are
3960 useful for comparing different revisions, going back to significant
3960 useful for comparing different revisions, going back to significant
3961 earlier versions, or marking branch points as releases. Changing
3961 earlier versions, or marking branch points as releases. Changing
3962 an existing tag is normally disallowed; use -f/--force to override.
3962 an existing tag is normally disallowed; use -f/--force to override.
3963
3963
3964 If no revision is given, the parent of the working directory is
3964 If no revision is given, the parent of the working directory is
3965 used, or tip if no revision is checked out.
3965 used, or tip if no revision is checked out.
3966
3966
3967 To facilitate version control, distribution, and merging of tags,
3967 To facilitate version control, distribution, and merging of tags,
3968 they are stored as a file named ".hgtags" which is managed similarly
3968 they are stored as a file named ".hgtags" which is managed similarly
3969 to other project files and can be hand-edited if necessary. This
3969 to other project files and can be hand-edited if necessary. This
3970 also means that tagging creates a new commit. The file
3970 also means that tagging creates a new commit. The file
3971 ".hg/localtags" is used for local tags (not shared among
3971 ".hg/localtags" is used for local tags (not shared among
3972 repositories).
3972 repositories).
3973
3973
3974 Tag commits are usually made at the head of a branch. If the parent
3974 Tag commits are usually made at the head of a branch. If the parent
3975 of the working directory is not a branch head, :hg:`tag` aborts; use
3975 of the working directory is not a branch head, :hg:`tag` aborts; use
3976 -f/--force to force the tag commit to be based on a non-head
3976 -f/--force to force the tag commit to be based on a non-head
3977 changeset.
3977 changeset.
3978
3978
3979 See :hg:`help dates` for a list of formats valid for -d/--date.
3979 See :hg:`help dates` for a list of formats valid for -d/--date.
3980
3980
3981 Since tag names have priority over branch names during revision
3981 Since tag names have priority over branch names during revision
3982 lookup, using an existing branch name as a tag name is discouraged.
3982 lookup, using an existing branch name as a tag name is discouraged.
3983
3983
3984 Returns 0 on success.
3984 Returns 0 on success.
3985 """
3985 """
3986
3986
3987 rev_ = "."
3987 rev_ = "."
3988 names = [t.strip() for t in (name1,) + names]
3988 names = [t.strip() for t in (name1,) + names]
3989 if len(names) != len(set(names)):
3989 if len(names) != len(set(names)):
3990 raise util.Abort(_('tag names must be unique'))
3990 raise util.Abort(_('tag names must be unique'))
3991 for n in names:
3991 for n in names:
3992 if n in ['tip', '.', 'null']:
3992 if n in ['tip', '.', 'null']:
3993 raise util.Abort(_('the name \'%s\' is reserved') % n)
3993 raise util.Abort(_('the name \'%s\' is reserved') % n)
3994 if not n:
3994 if not n:
3995 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
3995 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
3996 if opts.get('rev') and opts.get('remove'):
3996 if opts.get('rev') and opts.get('remove'):
3997 raise util.Abort(_("--rev and --remove are incompatible"))
3997 raise util.Abort(_("--rev and --remove are incompatible"))
3998 if opts.get('rev'):
3998 if opts.get('rev'):
3999 rev_ = opts['rev']
3999 rev_ = opts['rev']
4000 message = opts.get('message')
4000 message = opts.get('message')
4001 if opts.get('remove'):
4001 if opts.get('remove'):
4002 expectedtype = opts.get('local') and 'local' or 'global'
4002 expectedtype = opts.get('local') and 'local' or 'global'
4003 for n in names:
4003 for n in names:
4004 if not repo.tagtype(n):
4004 if not repo.tagtype(n):
4005 raise util.Abort(_('tag \'%s\' does not exist') % n)
4005 raise util.Abort(_('tag \'%s\' does not exist') % n)
4006 if repo.tagtype(n) != expectedtype:
4006 if repo.tagtype(n) != expectedtype:
4007 if expectedtype == 'global':
4007 if expectedtype == 'global':
4008 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4008 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4009 else:
4009 else:
4010 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4010 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4011 rev_ = nullid
4011 rev_ = nullid
4012 if not message:
4012 if not message:
4013 # we don't translate commit messages
4013 # we don't translate commit messages
4014 message = 'Removed tag %s' % ', '.join(names)
4014 message = 'Removed tag %s' % ', '.join(names)
4015 elif not opts.get('force'):
4015 elif not opts.get('force'):
4016 for n in names:
4016 for n in names:
4017 if n in repo.tags():
4017 if n in repo.tags():
4018 raise util.Abort(_('tag \'%s\' already exists '
4018 raise util.Abort(_('tag \'%s\' already exists '
4019 '(use -f to force)') % n)
4019 '(use -f to force)') % n)
4020 if not opts.get('local'):
4020 if not opts.get('local'):
4021 p1, p2 = repo.dirstate.parents()
4021 p1, p2 = repo.dirstate.parents()
4022 if p2 != nullid:
4022 if p2 != nullid:
4023 raise util.Abort(_('uncommitted merge'))
4023 raise util.Abort(_('uncommitted merge'))
4024 bheads = repo.branchheads()
4024 bheads = repo.branchheads()
4025 if not opts.get('force') and bheads and p1 not in bheads:
4025 if not opts.get('force') and bheads and p1 not in bheads:
4026 raise util.Abort(_('not at a branch head (use -f to force)'))
4026 raise util.Abort(_('not at a branch head (use -f to force)'))
4027 r = cmdutil.revsingle(repo, rev_).node()
4027 r = cmdutil.revsingle(repo, rev_).node()
4028
4028
4029 if not message:
4029 if not message:
4030 # we don't translate commit messages
4030 # we don't translate commit messages
4031 message = ('Added tag %s for changeset %s' %
4031 message = ('Added tag %s for changeset %s' %
4032 (', '.join(names), short(r)))
4032 (', '.join(names), short(r)))
4033
4033
4034 date = opts.get('date')
4034 date = opts.get('date')
4035 if date:
4035 if date:
4036 date = util.parsedate(date)
4036 date = util.parsedate(date)
4037
4037
4038 if opts.get('edit'):
4038 if opts.get('edit'):
4039 message = ui.edit(message, ui.username())
4039 message = ui.edit(message, ui.username())
4040
4040
4041 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4041 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4042
4042
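The checks at the top of tag() reject duplicate names, the reserved names 'tip', '.' and 'null', and names that are empty once stripped. The same validation, sketched standalone with ValueError in place of util.Abort (illustrative only):

RESERVED = ('tip', '.', 'null')

def checktagnames(name1, *names):
    names = [t.strip() for t in (name1,) + names]
    if len(names) != len(set(names)):
        raise ValueError('tag names must be unique')
    for n in names:
        if n in RESERVED:
            raise ValueError("the name '%s' is reserved" % n)
        if not n:
            raise ValueError('tag names cannot consist entirely of whitespace')
    return names

print(checktagnames('v1.0', ' v1.1 '))   # ['v1.0', 'v1.1']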
4043 def tags(ui, repo):
4044 """list repository tags
4045
4046 This lists both regular and local tags. When the -v/--verbose
4047 switch is used, a third column "local" is printed for local tags.
4048
4049 Returns 0 on success.
4050 """
4051
4052 hexfunc = ui.debugflag and hex or short
4053 tagtype = ""
4054
4055 for t, n in reversed(repo.tagslist()):
4056 if ui.quiet:
4057 ui.write("%s\n" % t)
4058 continue
4059
4060 hn = hexfunc(n)
4061 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4062 spaces = " " * (30 - encoding.colwidth(t))
4063
4064 if ui.verbose:
4065 if repo.tagtype(t) == 'local':
4066 tagtype = " local"
4067 else:
4068 tagtype = ""
4069 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4070
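tags() pads the name column to 30 display cells with encoding.colwidth(), which counts terminal columns rather than characters so wide glyphs still align. A rough standalone stand-in for that formatting (len() approximates colwidth(); the sample node is made up):

def tagline(name, rev, shortnode, width=30):
    # commands.py uses encoding.colwidth(name) here so that double-width
    # characters still line up; plain len() is only a rough stand-in
    padding = ' ' * max(1, width - len(name))
    return '%s%s %5d:%s' % (name, padding, rev, shortnode)

print(tagline('tip', 4795, '2f2ca519c593'))   # sample values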
4071 def tip(ui, repo, **opts):
4072 """show the tip revision
4073
4074 The tip revision (usually just called the tip) is the changeset
4075 most recently added to the repository (and therefore the most
4076 recently changed head).
4077
4078 If you have just made a commit, that commit will be the tip. If
4079 you have just pulled changes from another repository, the tip of
4080 that repository becomes the current tip. The "tip" tag is special
4081 and cannot be renamed or assigned to a different changeset.
4082
4083 Returns 0 on success.
4084 """
4085 displayer = cmdutil.show_changeset(ui, repo, opts)
4086 displayer.show(repo[len(repo) - 1])
4087 displayer.close()
4088
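tip() only has to display repo[len(repo) - 1]: revisions are numbered 0..n-1, so the highest local revision number is always the tip. A toy illustration with a list standing in for the changelog:

changelog = ['rev 0', 'rev 1', 'rev 2']        # most recently added last
tiprev = len(changelog) - 1                    # == 2, the tip revision number
print('tip is revision %d: %s' % (tiprev, changelog[tiprev]))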
4089 def unbundle(ui, repo, fname1, *fnames, **opts):
4090 """apply one or more changegroup files
4091
4092 Apply one or more compressed changegroup files generated by the
4093 bundle command.
4094
4095 Returns 0 on success, 1 if an update has unresolved files.
4096 """
4097 fnames = (fname1,) + fnames
4098
4099 lock = repo.lock()
4100 wc = repo['.']
4101 try:
4102 for fname in fnames:
4103 f = url.open(ui, fname)
4104 gen = changegroup.readbundle(f, fname)
4105 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4106 lock=lock)
4107 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4108 finally:
4109 lock.release()
4110 return postincoming(ui, repo, modheads, opts.get('update'), None)
4111
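unbundle() takes the repository lock once, applies every bundle inside one try block, and releases the lock in the finally clause even if reading or applying a bundle raises; the value it uses afterwards comes from the last bundle applied. The same acquire/iterate/always-release shape with a generic lock (a sketch, not the Mercurial locking API):

import threading

def apply_all(lock, filenames, apply_one):
    """Apply every file while holding the lock; always release it."""
    lock.acquire()
    try:
        result = None
        for fname in filenames:
            result = apply_one(fname)      # may raise; finally still runs
    finally:
        lock.release()
    return result                          # value from the last file applied

print(apply_all(threading.Lock(), ['a.hg', 'b.hg'], len))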
4112 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4113 """update working directory (or switch revisions)
4114
4115 Update the repository's working directory to the specified
4116 changeset. If no changeset is specified, update to the tip of the
4117 current named branch.
4118
4119 If the changeset is not a descendant of the working directory's
4120 parent, the update is aborted. With the -c/--check option, the
4121 working directory is checked for uncommitted changes; if none are
4122 found, the working directory is updated to the specified
4123 changeset.
4124
4125 The following rules apply when the working directory contains
4126 uncommitted changes:
4127
4128 1. If neither -c/--check nor -C/--clean is specified, and if
4129 the requested changeset is an ancestor or descendant of
4130 the working directory's parent, the uncommitted changes
4131 are merged into the requested changeset and the merged
4132 result is left uncommitted. If the requested changeset is
4133 not an ancestor or descendant (that is, it is on another
4134 branch), the update is aborted and the uncommitted changes
4135 are preserved.
4136
4137 2. With the -c/--check option, the update is aborted and the
4138 uncommitted changes are preserved.
4139
4140 3. With the -C/--clean option, uncommitted changes are discarded and
4141 the working directory is updated to the requested changeset.
4142
4143 Use null as the changeset to remove the working directory (like
4144 :hg:`clone -U`).
4145
4146 If you want to update just one file to an older changeset, use
4147 :hg:`revert`.
4148
4149 See :hg:`help dates` for a list of formats valid for -d/--date.
4150
4151 Returns 0 on success, 1 if there are unresolved files.
4152 """
4153 if rev and node:
4154 raise util.Abort(_("please specify just one revision"))
4155
4156 if rev is None or rev == '':
4157 rev = node
4158
4159 # if we defined a bookmark, we have to remember the original bookmark name
4160 brev = rev
4161 rev = cmdutil.revsingle(repo, rev, rev).rev()
4162
4163 if check and clean:
4164 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4165
4166 if check:
4167 # we could use dirty() but we can ignore merge and branch trivia
4168 c = repo[None]
4169 if c.modified() or c.added() or c.removed():
4170 raise util.Abort(_("uncommitted local changes"))
4171
4172 if date:
4173 if rev is not None:
4174 raise util.Abort(_("you can't specify a revision and a date"))
4175 rev = cmdutil.finddate(ui, repo, date)
4176
4177 if clean or check:
4178 ret = hg.clean(repo, rev)
4179 else:
4180 ret = hg.update(repo, rev)
4181
4182 if brev in repo._bookmarks:
4183 bookmarks.setcurrent(repo, brev)
4184
4185 return ret
4186
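Once the flags are validated, update() reduces to a small dispatch: --clean, or a --check run that found no local modifications, goes through hg.clean(), everything else through hg.update(), and combining -c with -C aborts. A compact standalone restatement of that decision (hypothetical helper, booleans only):

def choose_update(clean, check, dirty):
    """Return which operation hg update would perform, or raise."""
    if check and clean:
        raise ValueError('cannot specify both -c/--check and -C/--clean')
    if check and dirty:
        raise ValueError('uncommitted local changes')
    return 'hg.clean' if clean or check else 'hg.update'

print(choose_update(clean=False, check=True, dirty=False))   # hg.clean
print(choose_update(clean=False, check=False, dirty=True))   # hg.update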
4187 def verify(ui, repo):
4188 """verify the integrity of the repository
4189
4190 Verify the integrity of the current repository.
4191
4192 This will perform an extensive check of the repository's
4193 integrity, validating the hashes and checksums of each entry in
4194 the changelog, manifest, and tracked files, as well as the
4195 integrity of their crosslinks and indices.
4196
4197 Returns 0 on success, 1 if errors are encountered.
4198 """
4199 return hg.verify(repo)
4200
4201 def version_(ui):
4202 """output version and copyright information"""
4203 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4204 % util.version())
4205 ui.status(_(
4206 "(see http://mercurial.selenic.com for more information)\n"
4207 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4208 "This is free software; see the source for copying conditions. "
4209 "There is NO\nwarranty; "
4210 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4211 ))
4212
4213 # Command options and aliases are listed here, alphabetically
4214
4215 globalopts = [
4216 ('R', 'repository', '',
4217 _('repository root directory or name of overlay bundle file'),
4218 _('REPO')),
4219 ('', 'cwd', '',
4220 _('change working directory'), _('DIR')),
4221 ('y', 'noninteractive', None,
4222 _('do not prompt, assume \'yes\' for any required answers')),
4223 ('q', 'quiet', None, _('suppress output')),
4224 ('v', 'verbose', None, _('enable additional output')),
4225 ('', 'config', [],
4226 _('set/override config option (use \'section.name=value\')'),
4227 _('CONFIG')),
4228 ('', 'debug', None, _('enable debugging output')),
4229 ('', 'debugger', None, _('start debugger')),
4230 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4231 _('ENCODE')),
4232 ('', 'encodingmode', encoding.encodingmode,
4233 _('set the charset encoding mode'), _('MODE')),
4234 ('', 'traceback', None, _('always print a traceback on exception')),
4235 ('', 'time', None, _('time how long the command takes')),
4236 ('', 'profile', None, _('print command execution profile')),
4237 ('', 'version', None, _('output version information and exit')),
4238 ('h', 'help', None, _('display help and exit')),
4239 ]
4240
4241 dryrunopts = [('n', 'dry-run', None,
4242 _('do not perform actions, just print output'))]
4243
4244 remoteopts = [
4245 ('e', 'ssh', '',
4246 _('specify ssh command to use'), _('CMD')),
4247 ('', 'remotecmd', '',
4248 _('specify hg command to run on the remote side'), _('CMD')),
4249 ('', 'insecure', None,
4250 _('do not verify server certificate (ignoring web.cacerts config)')),
4251 ]
4252
4253 walkopts = [
4254 ('I', 'include', [],
4255 _('include names matching the given patterns'), _('PATTERN')),
4256 ('X', 'exclude', [],
4257 _('exclude names matching the given patterns'), _('PATTERN')),
4258 ]
4259
4260 commitopts = [
4261 ('m', 'message', '',
4262 _('use text as commit message'), _('TEXT')),
4263 ('l', 'logfile', '',
4264 _('read commit message from file'), _('FILE')),
4265 ]
4266
4267 commitopts2 = [
4268 ('d', 'date', '',
4269 _('record the specified date as commit date'), _('DATE')),
4270 ('u', 'user', '',
4271 _('record the specified user as committer'), _('USER')),
4272 ]
4273
4274 templateopts = [
4275 ('', 'style', '',
4276 _('display using template map file'), _('STYLE')),
4277 ('', 'template', '',
4278 _('display with template'), _('TEMPLATE')),
4279 ]
4280
4281 logopts = [
4282 ('p', 'patch', None, _('show patch')),
4283 ('g', 'git', None, _('use git extended diff format')),
4284 ('l', 'limit', '',
4285 _('limit number of changes displayed'), _('NUM')),
4286 ('M', 'no-merges', None, _('do not show merges')),
4287 ('', 'stat', None, _('output diffstat-style summary of changes')),
4288 ] + templateopts
4289
4290 diffopts = [
4291 ('a', 'text', None, _('treat all files as text')),
4292 ('g', 'git', None, _('use git extended diff format')),
4293 ('', 'nodates', None, _('omit dates from diff headers'))
4294 ]
4295
4296 diffopts2 = [
4297 ('p', 'show-function', None, _('show which function each change is in')),
4298 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4299 ('w', 'ignore-all-space', None,
4300 _('ignore white space when comparing lines')),
4301 ('b', 'ignore-space-change', None,
4302 _('ignore changes in the amount of white space')),
4303 ('B', 'ignore-blank-lines', None,
4304 _('ignore changes whose lines are all blank')),
4305 ('U', 'unified', '',
4306 _('number of lines of context to show'), _('NUM')),
4307 ('', 'stat', None, _('output diffstat-style summary of changes')),
4308 ]
4309
4310 similarityopts = [
4311 ('s', 'similarity', '',
4312 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4313 ]
4314
4315 subrepoopts = [
4316 ('S', 'subrepos', None,
4317 _('recurse into subrepositories'))
4318 ]
4319
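Each entry in the fragments above is a flag tuple of (short name, long name, default, help text[, value placeholder]), and commands simply concatenate the shared fragments onto their own lists in the table that follows. A small self-contained illustration of that shape and of how concatenation composes an option list (sample data only, not the real table):

walkopts_demo = [
    ('I', 'include', [], 'include names matching the given patterns', 'PATTERN'),
    ('X', 'exclude', [], 'exclude names matching the given patterns', 'PATTERN'),
]
dryrunopts_demo = [
    ('n', 'dry-run', None, 'do not perform actions, just print output'),
]

# a command entry combines its own flags with the shared fragments
demo_opts = [('v', 'verbose-demo', None, 'hypothetical command-specific flag')] + \
            walkopts_demo + dryrunopts_demo
for shortname, longname, default, helptext in (o[:4] for o in demo_opts):
    print('-%s/--%s (default %r): %s' % (shortname, longname, default, helptext))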
4320 table = {
4321 "^add": (add, walkopts + subrepoopts + dryrunopts,
4322 _('[OPTION]... [FILE]...')),
4323 "addremove":
4324 (addremove, similarityopts + walkopts + dryrunopts,
4325 _('[OPTION]... [FILE]...')),
4326 "^annotate|blame":
4327 (annotate,
4328 [('r', 'rev', '',
4329 _('annotate the specified revision'), _('REV')),
4330 ('', 'follow', None,
4331 _('follow copies/renames and list the filename (DEPRECATED)')),
4332 ('', 'no-follow', None, _("don't follow copies and renames")),
4333 ('a', 'text', None, _('treat all files as text')),
4334 ('u', 'user', None, _('list the author (long with -v)')),
4335 ('f', 'file', None, _('list the filename')),
4336 ('d', 'date', None, _('list the date (short with -q)')),
4337 ('n', 'number', None, _('list the revision number (default)')),
4338 ('c', 'changeset', None, _('list the changeset')),
4339 ('l', 'line-number', None,
4340 _('show line number at the first appearance'))
4341 ] + walkopts,
4342 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4343 "archive":
4344 (archive,
4345 [('', 'no-decode', None, _('do not pass files through decoders')),
4346 ('p', 'prefix', '',
4347 _('directory prefix for files in archive'), _('PREFIX')),
4348 ('r', 'rev', '',
4349 _('revision to distribute'), _('REV')),
4350 ('t', 'type', '',
4351 _('type of distribution to create'), _('TYPE')),
4352 ] + subrepoopts + walkopts,
4353 _('[OPTION]... DEST')),
4354 "backout":
4355 (backout,
4356 [('', 'merge', None,
4357 _('merge with old dirstate parent after backout')),
4358 ('', 'parent', '',
4359 _('parent to choose when backing out merge'), _('REV')),
4360 ('t', 'tool', '',
4361 _('specify merge tool')),
4362 ('r', 'rev', '',
4363 _('revision to backout'), _('REV')),
4364 ] + walkopts + commitopts + commitopts2,
4365 _('[OPTION]... [-r] REV')),
4366 "bisect":
4367 (bisect,
4368 [('r', 'reset', False, _('reset bisect state')),
4369 ('g', 'good', False, _('mark changeset good')),
4370 ('b', 'bad', False, _('mark changeset bad')),
4371 ('s', 'skip', False, _('skip testing changeset')),
4372 ('e', 'extend', False, _('extend the bisect range')),
4373 ('c', 'command', '',
4374 _('use command to check changeset state'), _('CMD')),
4375 ('U', 'noupdate', False, _('do not update to target'))],
4376 _("[-gbsr] [-U] [-c CMD] [REV]")),
4377 "bookmarks":
4378 (bookmark,
4379 [('f', 'force', False, _('force')),
4380 ('r', 'rev', '', _('revision'), _('REV')),
4381 ('d', 'delete', False, _('delete a given bookmark')),
4382 ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
4383 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
4384 "branch":
4385 (branch,
4386 [('f', 'force', None,
4387 _('set branch name even if it shadows an existing branch')),
4388 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4389 _('[-fC] [NAME]')),
4390 "branches":
4391 (branches,
4392 [('a', 'active', False,
4393 _('show only branches that have unmerged heads')),
4394 ('c', 'closed', False,
4395 _('show normal and closed branches'))],
4396 _('[-ac]')),
4397 "bundle":
4398 (bundle,
4399 [('f', 'force', None,
4400 _('run even when the destination is unrelated')),
4401 ('r', 'rev', [],
4402 _('a changeset intended to be added to the destination'),
4403 _('REV')),
4404 ('b', 'branch', [],
4405 _('a specific branch you would like to bundle'),
4406 _('BRANCH')),
4407 ('', 'base', [],
4408 _('a base changeset assumed to be available at the destination'),
4409 _('REV')),
4410 ('a', 'all', None, _('bundle all changesets in the repository')),
4411 ('t', 'type', 'bzip2',
4412 _('bundle compression type to use'), _('TYPE')),
4413 ] + remoteopts,
4414 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4415 "cat":
4416 (cat,
4417 [('o', 'output', '',
4418 _('print output to file with formatted name'), _('FORMAT')),
4419 ('r', 'rev', '',
4420 _('print the given revision'), _('REV')),
4421 ('', 'decode', None, _('apply any matching decode filter')),
4422 ] + walkopts,
4423 _('[OPTION]... FILE...')),
4424 "^clone":
4425 (clone,
4426 [('U', 'noupdate', None,
4427 _('the clone will include an empty working copy (only a repository)')),
4428 ('u', 'updaterev', '',
4429 _('revision, tag or branch to check out'), _('REV')),
4430 ('r', 'rev', [],
4431 _('include the specified changeset'), _('REV')),
4432 ('b', 'branch', [],
4433 _('clone only the specified branch'), _('BRANCH')),
4434 ('', 'pull', None, _('use pull protocol to copy metadata')),
4435 ('', 'uncompressed', None,
4436 _('use uncompressed transfer (fast over LAN)')),
4437 ] + remoteopts,
4438 _('[OPTION]... SOURCE [DEST]')),
4439 "^commit|ci":
4440 (commit,
4441 [('A', 'addremove', None,
4442 _('mark new/missing files as added/removed before committing')),
4443 ('', 'close-branch', None,
4444 _('mark a branch as closed, hiding it from the branch list')),
4445 ] + walkopts + commitopts + commitopts2,
4446 _('[OPTION]... [FILE]...')),
4447 "copy|cp":
4448 (copy,
4449 [('A', 'after', None, _('record a copy that has already occurred')),
4450 ('f', 'force', None,
4451 _('forcibly copy over an existing managed file')),
4452 ] + walkopts + dryrunopts,
4453 _('[OPTION]... [SOURCE]... DEST')),
4454 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4454 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4455 "debugbuilddag":
4455 "debugbuilddag":
4456 (debugbuilddag,
4456 (debugbuilddag,
4457 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4457 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4458 ('a', 'appended-file', None, _('add single file all revs append to')),
4458 ('a', 'appended-file', None, _('add single file all revs append to')),
4459 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4459 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4460 ('n', 'new-file', None, _('add new file at each rev')),
4460 ('n', 'new-file', None, _('add new file at each rev')),
4461 ],
4461 ],
4462 _('[OPTION]... TEXT')),
4462 _('[OPTION]... TEXT')),
4463 "debugbundle":
4463 "debugbundle":
4464 (debugbundle,
4464 (debugbundle,
4465 [('a', 'all', None, _('show all details')),
4465 [('a', 'all', None, _('show all details')),
4466 ],
4466 ],
4467 _('FILE')),
4467 _('FILE')),
4468 "debugcheckstate": (debugcheckstate, [], ''),
4468 "debugcheckstate": (debugcheckstate, [], ''),
4469 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4469 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4470 "debugcomplete":
4470 "debugcomplete":
4471 (debugcomplete,
4471 (debugcomplete,
4472 [('o', 'options', None, _('show the command options'))],
4472 [('o', 'options', None, _('show the command options'))],
4473 _('[-o] CMD')),
4473 _('[-o] CMD')),
4474 "debugdag":
4474 "debugdag":
4475 (debugdag,
4475 (debugdag,
4476 [('t', 'tags', None, _('use tags as labels')),
4476 [('t', 'tags', None, _('use tags as labels')),
4477 ('b', 'branches', None, _('annotate with branch names')),
4477 ('b', 'branches', None, _('annotate with branch names')),
4478 ('', 'dots', None, _('use dots for runs')),
4478 ('', 'dots', None, _('use dots for runs')),
4479 ('s', 'spaces', None, _('separate elements by spaces')),
4479 ('s', 'spaces', None, _('separate elements by spaces')),
4480 ],
4480 ],
4481 _('[OPTION]... [FILE [REV]...]')),
4481 _('[OPTION]... [FILE [REV]...]')),
4482 "debugdate":
4482 "debugdate":
4483 (debugdate,
4483 (debugdate,
4484 [('e', 'extended', None, _('try extended date formats'))],
4484 [('e', 'extended', None, _('try extended date formats'))],
4485 _('[-e] DATE [RANGE]')),
4485 _('[-e] DATE [RANGE]')),
4486 "debugdata": (debugdata, [], _('FILE REV')),
4486 "debugdata": (debugdata, [], _('FILE REV')),
4487 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4487 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4488 "debuggetbundle":
4488 "debuggetbundle":
4489 (debuggetbundle,
4489 (debuggetbundle,
4490 [('H', 'head', [], _('id of head node'), _('ID')),
4490 [('H', 'head', [], _('id of head node'), _('ID')),
4491 ('C', 'common', [], _('id of common node'), _('ID')),
4491 ('C', 'common', [], _('id of common node'), _('ID')),
4492 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4492 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4493 ],
4493 ],
4494 _('REPO FILE [-H|-C ID]...')),
4494 _('REPO FILE [-H|-C ID]...')),
4495 "debugignore": (debugignore, [], ''),
4495 "debugignore": (debugignore, [], ''),
4496 "debugindex": (debugindex,
4496 "debugindex": (debugindex,
4497 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4497 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4498 _('FILE')),
4498 _('FILE')),
4499 "debugindexdot": (debugindexdot, [], _('FILE')),
4499 "debugindexdot": (debugindexdot, [], _('FILE')),
4500 "debuginstall": (debuginstall, [], ''),
4500 "debuginstall": (debuginstall, [], ''),
4501 "debugknown": (debugknown, [], _('REPO ID...')),
4501 "debugknown": (debugknown, [], _('REPO ID...')),
4502 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4502 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4503 "debugrebuildstate":
4503 "debugrebuildstate":
4504 (debugrebuildstate,
4504 (debugrebuildstate,
4505 [('r', 'rev', '',
4505 [('r', 'rev', '',
4506 _('revision to rebuild to'), _('REV'))],
4506 _('revision to rebuild to'), _('REV'))],
4507 _('[-r REV] [REV]')),
4507 _('[-r REV] [REV]')),
4508 "debugrename":
4508 "debugrename":
4509 (debugrename,
4509 (debugrename,
4510 [('r', 'rev', '',
4510 [('r', 'rev', '',
4511 _('revision to debug'), _('REV'))],
4511 _('revision to debug'), _('REV'))],
4512 _('[-r REV] FILE')),
4512 _('[-r REV] FILE')),
4513 "debugrevspec":
4513 "debugrevspec":
4514 (debugrevspec, [], ('REVSPEC')),
4514 (debugrevspec, [], ('REVSPEC')),
4515 "debugsetparents":
4515 "debugsetparents":
4516 (debugsetparents, [], _('REV1 [REV2]')),
4516 (debugsetparents, [], _('REV1 [REV2]')),
4517 "debugstate":
4517 "debugstate":
4518 (debugstate,
4518 (debugstate,
4519 [('', 'nodates', None, _('do not display the saved mtime')),
4519 [('', 'nodates', None, _('do not display the saved mtime')),
4520 ('', 'datesort', None, _('sort by saved mtime'))],
4520 ('', 'datesort', None, _('sort by saved mtime'))],
4521 _('[OPTION]...')),
4521 _('[OPTION]...')),
4522 "debugsub":
4522 "debugsub":
4523 (debugsub,
4523 (debugsub,
4524 [('r', 'rev', '',
4524 [('r', 'rev', '',
4525 _('revision to check'), _('REV'))],
4525 _('revision to check'), _('REV'))],
4526 _('[-r REV] [REV]')),
4526 _('[-r REV] [REV]')),
4527 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4527 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4528 "debugwireargs":
4528 "debugwireargs":
4529 (debugwireargs,
4529 (debugwireargs,
4530 [('', 'three', '', 'three'),
4530 [('', 'three', '', 'three'),
4531 ('', 'four', '', 'four'),
4531 ('', 'four', '', 'four'),
4532 ('', 'five', '', 'five'),
4532 ('', 'five', '', 'five'),
4533 ] + remoteopts,
4533 ] + remoteopts,
4534 _('REPO [OPTIONS]... [ONE [TWO]]')),
4534 _('REPO [OPTIONS]... [ONE [TWO]]')),
4535 "^diff":
4535 "^diff":
4536 (diff,
4536 (diff,
4537 [('r', 'rev', [],
4537 [('r', 'rev', [],
4538 _('revision'), _('REV')),
4538 _('revision'), _('REV')),
4539 ('c', 'change', '',
4539 ('c', 'change', '',
4540 _('change made by revision'), _('REV'))
4540 _('change made by revision'), _('REV'))
4541 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4541 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4542 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4542 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4543 "^export":
4543 "^export":
4544 (export,
4544 (export,
4545 [('o', 'output', '',
4545 [('o', 'output', '',
4546 _('print output to file with formatted name'), _('FORMAT')),
4546 _('print output to file with formatted name'), _('FORMAT')),
4547 ('', 'switch-parent', None, _('diff against the second parent')),
4547 ('', 'switch-parent', None, _('diff against the second parent')),
4548 ('r', 'rev', [],
4548 ('r', 'rev', [],
4549 _('revisions to export'), _('REV')),
4549 _('revisions to export'), _('REV')),
4550 ] + diffopts,
4550 ] + diffopts,
4551 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4551 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4552 "^forget":
4552 "^forget":
4553 (forget,
4553 (forget,
4554 [] + walkopts,
4554 [] + walkopts,
4555 _('[OPTION]... FILE...')),
4555 _('[OPTION]... FILE...')),
4556 "grep":
4556 "grep":
4557 (grep,
4557 (grep,
4558 [('0', 'print0', None, _('end fields with NUL')),
4558 [('0', 'print0', None, _('end fields with NUL')),
4559 ('', 'all', None, _('print all revisions that match')),
4559 ('', 'all', None, _('print all revisions that match')),
4560 ('a', 'text', None, _('treat all files as text')),
4560 ('a', 'text', None, _('treat all files as text')),
4561 ('f', 'follow', None,
4561 ('f', 'follow', None,
4562 _('follow changeset history,'
4562 _('follow changeset history,'
4563 ' or file history across copies and renames')),
4563 ' or file history across copies and renames')),
4564 ('i', 'ignore-case', None, _('ignore case when matching')),
4564 ('i', 'ignore-case', None, _('ignore case when matching')),
4565 ('l', 'files-with-matches', None,
4565 ('l', 'files-with-matches', None,
4566 _('print only filenames and revisions that match')),
4566 _('print only filenames and revisions that match')),
4567 ('n', 'line-number', None, _('print matching line numbers')),
4567 ('n', 'line-number', None, _('print matching line numbers')),
4568 ('r', 'rev', [],
4568 ('r', 'rev', [],
4569 _('only search files changed within revision range'), _('REV')),
4569 _('only search files changed within revision range'), _('REV')),
4570 ('u', 'user', None, _('list the author (long with -v)')),
4570 ('u', 'user', None, _('list the author (long with -v)')),
4571 ('d', 'date', None, _('list the date (short with -q)')),
4571 ('d', 'date', None, _('list the date (short with -q)')),
4572 ] + walkopts,
4572 ] + walkopts,
4573 _('[OPTION]... PATTERN [FILE]...')),
4573 _('[OPTION]... PATTERN [FILE]...')),
4574 "heads":
4574 "heads":
4575 (heads,
4575 (heads,
4576 [('r', 'rev', '',
4576 [('r', 'rev', '',
4577 _('show only heads which are descendants of STARTREV'),
4577 _('show only heads which are descendants of STARTREV'),
4578 _('STARTREV')),
4578 _('STARTREV')),
4579 ('t', 'topo', False, _('show topological heads only')),
4579 ('t', 'topo', False, _('show topological heads only')),
4580 ('a', 'active', False,
4580 ('a', 'active', False,
4581 _('show active branchheads only (DEPRECATED)')),
4581 _('show active branchheads only (DEPRECATED)')),
4582 ('c', 'closed', False,
4582 ('c', 'closed', False,
4583 _('show normal and closed branch heads')),
4583 _('show normal and closed branch heads')),
4584 ] + templateopts,
4584 ] + templateopts,
4585 _('[-ac] [-r STARTREV] [REV]...')),
4585 _('[-ac] [-r STARTREV] [REV]...')),
4586 "help": (help_, [], _('[TOPIC]')),
4586 "help": (help_, [], _('[TOPIC]')),
4587 "identify|id":
4587 "identify|id":
4588 (identify,
4588 (identify,
4589 [('r', 'rev', '',
4589 [('r', 'rev', '',
4590 _('identify the specified revision'), _('REV')),
4590 _('identify the specified revision'), _('REV')),
4591 ('n', 'num', None, _('show local revision number')),
4591 ('n', 'num', None, _('show local revision number')),
4592 ('i', 'id', None, _('show global revision id')),
4592 ('i', 'id', None, _('show global revision id')),
4593 ('b', 'branch', None, _('show branch')),
4593 ('b', 'branch', None, _('show branch')),
4594 ('t', 'tags', None, _('show tags')),
4594 ('t', 'tags', None, _('show tags')),
4595 ('B', 'bookmarks', None, _('show bookmarks'))],
4595 ('B', 'bookmarks', None, _('show bookmarks'))],
4596 _('[-nibtB] [-r REV] [SOURCE]')),
4596 _('[-nibtB] [-r REV] [SOURCE]')),
4597 "import|patch":
4597 "import|patch":
4598 (import_,
4598 (import_,
4599 [('p', 'strip', 1,
4599 [('p', 'strip', 1,
4600 _('directory strip option for patch. This has the same '
4600 _('directory strip option for patch. This has the same '
4601 'meaning as the corresponding patch option'),
4601 'meaning as the corresponding patch option'),
4602 _('NUM')),
4602 _('NUM')),
4603 ('b', 'base', '',
4603 ('b', 'base', '',
4604 _('base path'), _('PATH')),
4604 _('base path'), _('PATH')),
4605 ('f', 'force', None,
4605 ('f', 'force', None,
4606 _('skip check for outstanding uncommitted changes')),
4606 _('skip check for outstanding uncommitted changes')),
4607 ('', 'no-commit', None,
4607 ('', 'no-commit', None,
4608 _("don't commit, just update the working directory")),
4608 _("don't commit, just update the working directory")),
4609 ('', 'exact', None,
4609 ('', 'exact', None,
4610 _('apply patch to the nodes from which it was generated')),
4610 _('apply patch to the nodes from which it was generated')),
4611 ('', 'import-branch', None,
4611 ('', 'import-branch', None,
4612 _('use any branch information in patch (implied by --exact)'))] +
4612 _('use any branch information in patch (implied by --exact)'))] +
4613 commitopts + commitopts2 + similarityopts,
4613 commitopts + commitopts2 + similarityopts,
4614 _('[OPTION]... PATCH...')),
4614 _('[OPTION]... PATCH...')),
4615 "incoming|in":
4615 "incoming|in":
4616 (incoming,
4616 (incoming,
4617 [('f', 'force', None,
4617 [('f', 'force', None,
4618 _('run even if remote repository is unrelated')),
4618 _('run even if remote repository is unrelated')),
4619 ('n', 'newest-first', None, _('show newest record first')),
4619 ('n', 'newest-first', None, _('show newest record first')),
4620 ('', 'bundle', '',
4620 ('', 'bundle', '',
4621 _('file to store the bundles into'), _('FILE')),
4621 _('file to store the bundles into'), _('FILE')),
4622 ('r', 'rev', [],
4622 ('r', 'rev', [],
4623 _('a remote changeset intended to be added'), _('REV')),
4623 _('a remote changeset intended to be added'), _('REV')),
4624 ('B', 'bookmarks', False, _("compare bookmarks")),
4624 ('B', 'bookmarks', False, _("compare bookmarks")),
4625 ('b', 'branch', [],
4625 ('b', 'branch', [],
4626 _('a specific branch you would like to pull'), _('BRANCH')),
4626 _('a specific branch you would like to pull'), _('BRANCH')),
4627 ] + logopts + remoteopts + subrepoopts,
4627 ] + logopts + remoteopts + subrepoopts,
4628 _('[-p] [-n] [-M] [-f] [-r REV]...'
4628 _('[-p] [-n] [-M] [-f] [-r REV]...'
4629 ' [--bundle FILENAME] [SOURCE]')),
4629 ' [--bundle FILENAME] [SOURCE]')),
4630 "^init":
4630 "^init":
4631 (init,
4631 (init,
4632 remoteopts,
4632 remoteopts,
4633 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4633 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4634 "locate":
4634 "locate":
4635 (locate,
4635 (locate,
4636 [('r', 'rev', '',
4636 [('r', 'rev', '',
4637 _('search the repository as it is in REV'), _('REV')),
4637 _('search the repository as it is in REV'), _('REV')),
4638 ('0', 'print0', None,
4638 ('0', 'print0', None,
4639 _('end filenames with NUL, for use with xargs')),
4639 _('end filenames with NUL, for use with xargs')),
4640 ('f', 'fullpath', None,
4640 ('f', 'fullpath', None,
4641 _('print complete paths from the filesystem root')),
4641 _('print complete paths from the filesystem root')),
4642 ] + walkopts,
4642 ] + walkopts,
4643 _('[OPTION]... [PATTERN]...')),
4643 _('[OPTION]... [PATTERN]...')),
4644 "^log|history":
4644 "^log|history":
4645 (log,
4645 (log,
4646 [('f', 'follow', None,
4646 [('f', 'follow', None,
4647 _('follow changeset history,'
4647 _('follow changeset history,'
4648 ' or file history across copies and renames')),
4648 ' or file history across copies and renames')),
4649 ('', 'follow-first', None,
4649 ('', 'follow-first', None,
4650 _('only follow the first parent of merge changesets')),
4650 _('only follow the first parent of merge changesets')),
4651 ('d', 'date', '',
4651 ('d', 'date', '',
4652 _('show revisions matching date spec'), _('DATE')),
4652 _('show revisions matching date spec'), _('DATE')),
4653 ('C', 'copies', None, _('show copied files')),
4653 ('C', 'copies', None, _('show copied files')),
4654 ('k', 'keyword', [],
4654 ('k', 'keyword', [],
4655 _('do case-insensitive search for a given text'), _('TEXT')),
4655 _('do case-insensitive search for a given text'), _('TEXT')),
4656 ('r', 'rev', [],
4656 ('r', 'rev', [],
4657 _('show the specified revision or range'), _('REV')),
4657 _('show the specified revision or range'), _('REV')),
4658 ('', 'removed', None, _('include revisions where files were removed')),
4658 ('', 'removed', None, _('include revisions where files were removed')),
4659 ('m', 'only-merges', None, _('show only merges')),
4659 ('m', 'only-merges', None, _('show only merges')),
4660 ('u', 'user', [],
4660 ('u', 'user', [],
4661 _('revisions committed by user'), _('USER')),
4661 _('revisions committed by user'), _('USER')),
4662 ('', 'only-branch', [],
4662 ('', 'only-branch', [],
4663 _('show only changesets within the given named branch (DEPRECATED)'),
4663 _('show only changesets within the given named branch (DEPRECATED)'),
4664 _('BRANCH')),
4664 _('BRANCH')),
4665 ('b', 'branch', [],
4665 ('b', 'branch', [],
4666 _('show changesets within the given named branch'), _('BRANCH')),
4666 _('show changesets within the given named branch'), _('BRANCH')),
4667 ('P', 'prune', [],
4667 ('P', 'prune', [],
4668 _('do not display revision or any of its ancestors'), _('REV')),
4668 _('do not display revision or any of its ancestors'), _('REV')),
4669 ] + logopts + walkopts,
4669 ] + logopts + walkopts,
4670 _('[OPTION]... [FILE]')),
4670 _('[OPTION]... [FILE]')),
4671 "manifest":
4671 "manifest":
4672 (manifest,
4672 (manifest,
4673 [('r', 'rev', '',
4673 [('r', 'rev', '',
4674 _('revision to display'), _('REV'))],
4674 _('revision to display'), _('REV'))],
4675 _('[-r REV]')),
4675 _('[-r REV]')),
4676 "^merge":
4676 "^merge":
4677 (merge,
4677 (merge,
4678 [('f', 'force', None, _('force a merge with outstanding changes')),
4678 [('f', 'force', None, _('force a merge with outstanding changes')),
4679 ('t', 'tool', '', _('specify merge tool')),
4679 ('t', 'tool', '', _('specify merge tool')),
4680 ('r', 'rev', '',
4680 ('r', 'rev', '',
4681 _('revision to merge'), _('REV')),
4681 _('revision to merge'), _('REV')),
4682 ('P', 'preview', None,
4682 ('P', 'preview', None,
4683 _('review revisions to merge (no merge is performed)'))],
4683 _('review revisions to merge (no merge is performed)'))],
4684 _('[-P] [-f] [[-r] REV]')),
4684 _('[-P] [-f] [[-r] REV]')),
4685 "outgoing|out":
4685 "outgoing|out":
4686 (outgoing,
4686 (outgoing,
4687 [('f', 'force', None,
4687 [('f', 'force', None,
4688 _('run even when the destination is unrelated')),
4688 _('run even when the destination is unrelated')),
4689 ('r', 'rev', [],
4689 ('r', 'rev', [],
4690 _('a changeset intended to be included in the destination'),
4690 _('a changeset intended to be included in the destination'),
4691 _('REV')),
4691 _('REV')),
4692 ('n', 'newest-first', None, _('show newest record first')),
4692 ('n', 'newest-first', None, _('show newest record first')),
4693 ('B', 'bookmarks', False, _("compare bookmarks")),
4693 ('B', 'bookmarks', False, _("compare bookmarks")),
4694 ('b', 'branch', [],
4694 ('b', 'branch', [],
4695 _('a specific branch you would like to push'), _('BRANCH')),
4695 _('a specific branch you would like to push'), _('BRANCH')),
4696 ] + logopts + remoteopts + subrepoopts,
4696 ] + logopts + remoteopts + subrepoopts,
4697 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4697 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4698 "parents":
4698 "parents":
4699 (parents,
4699 (parents,
4700 [('r', 'rev', '',
4700 [('r', 'rev', '',
4701 _('show parents of the specified revision'), _('REV')),
4701 _('show parents of the specified revision'), _('REV')),
4702 ] + templateopts,
4702 ] + templateopts,
4703 _('[-r REV] [FILE]')),
4703 _('[-r REV] [FILE]')),
4704 "paths": (paths, [], _('[NAME]')),
4704 "paths": (paths, [], _('[NAME]')),
4705 "^pull":
4705 "^pull":
4706 (pull,
4706 (pull,
4707 [('u', 'update', None,
4707 [('u', 'update', None,
4708 _('update to new branch head if changesets were pulled')),
4708 _('update to new branch head if changesets were pulled')),
4709 ('f', 'force', None,
4709 ('f', 'force', None,
4710 _('run even when remote repository is unrelated')),
4710 _('run even when remote repository is unrelated')),
4711 ('r', 'rev', [],
4711 ('r', 'rev', [],
4712 _('a remote changeset intended to be added'), _('REV')),
4712 _('a remote changeset intended to be added'), _('REV')),
4713 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4713 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4714 ('b', 'branch', [],
4714 ('b', 'branch', [],
4715 _('a specific branch you would like to pull'), _('BRANCH')),
4715 _('a specific branch you would like to pull'), _('BRANCH')),
4716 ] + remoteopts,
4716 ] + remoteopts,
4717 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4717 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4718 "^push":
4718 "^push":
4719 (push,
4719 (push,
4720 [('f', 'force', None, _('force push')),
4720 [('f', 'force', None, _('force push')),
4721 ('r', 'rev', [],
4721 ('r', 'rev', [],
4722 _('a changeset intended to be included in the destination'),
4722 _('a changeset intended to be included in the destination'),
4723 _('REV')),
4723 _('REV')),
4724 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4724 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4725 ('b', 'branch', [],
4725 ('b', 'branch', [],
4726 _('a specific branch you would like to push'), _('BRANCH')),
4726 _('a specific branch you would like to push'), _('BRANCH')),
4727 ('', 'new-branch', False, _('allow pushing a new branch')),
4727 ('', 'new-branch', False, _('allow pushing a new branch')),
4728 ] + remoteopts,
4728 ] + remoteopts,
4729 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4729 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4730 "recover": (recover, []),
4730 "recover": (recover, []),
4731 "^remove|rm":
4731 "^remove|rm":
4732 (remove,
4732 (remove,
4733 [('A', 'after', None, _('record delete for missing files')),
4733 [('A', 'after', None, _('record delete for missing files')),
4734 ('f', 'force', None,
4734 ('f', 'force', None,
4735 _('remove (and delete) file even if added or modified')),
4735 _('remove (and delete) file even if added or modified')),
4736 ] + walkopts,
4736 ] + walkopts,
4737 _('[OPTION]... FILE...')),
4737 _('[OPTION]... FILE...')),
4738 "rename|move|mv":
4738 "rename|move|mv":
4739 (rename,
4739 (rename,
4740 [('A', 'after', None, _('record a rename that has already occurred')),
4740 [('A', 'after', None, _('record a rename that has already occurred')),
4741 ('f', 'force', None,
4741 ('f', 'force', None,
4742 _('forcibly copy over an existing managed file')),
4742 _('forcibly copy over an existing managed file')),
4743 ] + walkopts + dryrunopts,
4743 ] + walkopts + dryrunopts,
4744 _('[OPTION]... SOURCE... DEST')),
4744 _('[OPTION]... SOURCE... DEST')),
4745 "resolve":
4745 "resolve":
4746 (resolve,
4746 (resolve,
4747 [('a', 'all', None, _('select all unresolved files')),
4747 [('a', 'all', None, _('select all unresolved files')),
4748 ('l', 'list', None, _('list state of files needing merge')),
4748 ('l', 'list', None, _('list state of files needing merge')),
4749 ('m', 'mark', None, _('mark files as resolved')),
4749 ('m', 'mark', None, _('mark files as resolved')),
4750 ('u', 'unmark', None, _('mark files as unresolved')),
4750 ('u', 'unmark', None, _('mark files as unresolved')),
4751 ('t', 'tool', '', _('specify merge tool')),
4751 ('t', 'tool', '', _('specify merge tool')),
4752 ('n', 'no-status', None, _('hide status prefix'))]
4752 ('n', 'no-status', None, _('hide status prefix'))]
4753 + walkopts,
4753 + walkopts,
4754 _('[OPTION]... [FILE]...')),
4754 _('[OPTION]... [FILE]...')),
4755 "revert":
4755 "revert":
4756 (revert,
4756 (revert,
4757 [('a', 'all', None, _('revert all changes when no arguments given')),
4757 [('a', 'all', None, _('revert all changes when no arguments given')),
4758 ('d', 'date', '',
4758 ('d', 'date', '',
4759 _('tipmost revision matching date'), _('DATE')),
4759 _('tipmost revision matching date'), _('DATE')),
4760 ('r', 'rev', '',
4760 ('r', 'rev', '',
4761 _('revert to the specified revision'), _('REV')),
4761 _('revert to the specified revision'), _('REV')),
4762 ('', 'no-backup', None, _('do not save backup copies of files')),
4762 ('', 'no-backup', None, _('do not save backup copies of files')),
4763 ] + walkopts + dryrunopts,
4763 ] + walkopts + dryrunopts,
4764 _('[OPTION]... [-r REV] [NAME]...')),
4764 _('[OPTION]... [-r REV] [NAME]...')),
4765 "rollback": (rollback, dryrunopts),
4765 "rollback": (rollback, dryrunopts),
4766 "root": (root, []),
4766 "root": (root, []),
4767 "^serve":
4767 "^serve":
4768 (serve,
4768 (serve,
4769 [('A', 'accesslog', '',
4769 [('A', 'accesslog', '',
4770 _('name of access log file to write to'), _('FILE')),
4770 _('name of access log file to write to'), _('FILE')),
4771 ('d', 'daemon', None, _('run server in background')),
4771 ('d', 'daemon', None, _('run server in background')),
4772 ('', 'daemon-pipefds', '',
4772 ('', 'daemon-pipefds', '',
4773 _('used internally by daemon mode'), _('NUM')),
4773 _('used internally by daemon mode'), _('NUM')),
4774 ('E', 'errorlog', '',
4774 ('E', 'errorlog', '',
4775 _('name of error log file to write to'), _('FILE')),
4775 _('name of error log file to write to'), _('FILE')),
4776 # use string type so we can check whether a value was passed
4776 # use string type so we can check whether a value was passed
4777 ('p', 'port', '',
4777 ('p', 'port', '',
4778 _('port to listen on (default: 8000)'), _('PORT')),
4778 _('port to listen on (default: 8000)'), _('PORT')),
4779 ('a', 'address', '',
4779 ('a', 'address', '',
4780 _('address to listen on (default: all interfaces)'), _('ADDR')),
4780 _('address to listen on (default: all interfaces)'), _('ADDR')),
4781 ('', 'prefix', '',
4781 ('', 'prefix', '',
4782 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4782 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4783 ('n', 'name', '',
4783 ('n', 'name', '',
4784 _('name to show in web pages (default: working directory)'),
4784 _('name to show in web pages (default: working directory)'),
4785 _('NAME')),
4785 _('NAME')),
4786 ('', 'web-conf', '',
4786 ('', 'web-conf', '',
4787 _('name of the hgweb config file (see "hg help hgweb")'),
4787 _('name of the hgweb config file (see "hg help hgweb")'),
4788 _('FILE')),
4788 _('FILE')),
4789 ('', 'webdir-conf', '',
4789 ('', 'webdir-conf', '',
4790 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4790 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4791 ('', 'pid-file', '',
4791 ('', 'pid-file', '',
4792 _('name of file to write process ID to'), _('FILE')),
4792 _('name of file to write process ID to'), _('FILE')),
4793 ('', 'stdio', None, _('for remote clients')),
4793 ('', 'stdio', None, _('for remote clients')),
4794 ('t', 'templates', '',
4794 ('t', 'templates', '',
4795 _('web templates to use'), _('TEMPLATE')),
4795 _('web templates to use'), _('TEMPLATE')),
4796 ('', 'style', '',
4796 ('', 'style', '',
4797 _('template style to use'), _('STYLE')),
4797 _('template style to use'), _('STYLE')),
4798 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4798 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4799 ('', 'certificate', '',
4799 ('', 'certificate', '',
4800 _('SSL certificate file'), _('FILE'))],
4800 _('SSL certificate file'), _('FILE'))],
4801 _('[OPTION]...')),
4801 _('[OPTION]...')),
4802 "showconfig|debugconfig":
4802 "showconfig|debugconfig":
4803 (showconfig,
4803 (showconfig,
4804 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4804 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4805 _('[-u] [NAME]...')),
4805 _('[-u] [NAME]...')),
4806 "^summary|sum":
4806 "^summary|sum":
4807 (summary,
4807 (summary,
4808 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4808 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4809 "^status|st":
4809 "^status|st":
4810 (status,
4810 (status,
4811 [('A', 'all', None, _('show status of all files')),
4811 [('A', 'all', None, _('show status of all files')),
4812 ('m', 'modified', None, _('show only modified files')),
4812 ('m', 'modified', None, _('show only modified files')),
4813 ('a', 'added', None, _('show only added files')),
4813 ('a', 'added', None, _('show only added files')),
4814 ('r', 'removed', None, _('show only removed files')),
4814 ('r', 'removed', None, _('show only removed files')),
4815 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4815 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4816 ('c', 'clean', None, _('show only files without changes')),
4816 ('c', 'clean', None, _('show only files without changes')),
4817 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4817 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4818 ('i', 'ignored', None, _('show only ignored files')),
4818 ('i', 'ignored', None, _('show only ignored files')),
4819 ('n', 'no-status', None, _('hide status prefix')),
4819 ('n', 'no-status', None, _('hide status prefix')),
4820 ('C', 'copies', None, _('show source of copied files')),
4820 ('C', 'copies', None, _('show source of copied files')),
4821 ('0', 'print0', None,
4821 ('0', 'print0', None,
4822 _('end filenames with NUL, for use with xargs')),
4822 _('end filenames with NUL, for use with xargs')),
4823 ('', 'rev', [],
4823 ('', 'rev', [],
4824 _('show difference from revision'), _('REV')),
4824 _('show difference from revision'), _('REV')),
4825 ('', 'change', '',
4825 ('', 'change', '',
4826 _('list the changed files of a revision'), _('REV')),
4826 _('list the changed files of a revision'), _('REV')),
4827 ] + walkopts + subrepoopts,
4827 ] + walkopts + subrepoopts,
4828 _('[OPTION]... [FILE]...')),
4828 _('[OPTION]... [FILE]...')),
4829 "tag":
4829 "tag":
4830 (tag,
4830 (tag,
4831 [('f', 'force', None, _('force tag')),
4831 [('f', 'force', None, _('force tag')),
4832 ('l', 'local', None, _('make the tag local')),
4832 ('l', 'local', None, _('make the tag local')),
4833 ('r', 'rev', '',
4833 ('r', 'rev', '',
4834 _('revision to tag'), _('REV')),
4834 _('revision to tag'), _('REV')),
4835 ('', 'remove', None, _('remove a tag')),
4835 ('', 'remove', None, _('remove a tag')),
4836 # -l/--local is already there, so commitopts cannot be used
4836 # -l/--local is already there, so commitopts cannot be used
4837 ('e', 'edit', None, _('edit commit message')),
4837 ('e', 'edit', None, _('edit commit message')),
4838 ('m', 'message', '',
4838 ('m', 'message', '',
4839 _('use <text> as commit message'), _('TEXT')),
4839 _('use <text> as commit message'), _('TEXT')),
4840 ] + commitopts2,
4840 ] + commitopts2,
4841 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4841 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4842 "tags": (tags, [], ''),
4842 "tags": (tags, [], ''),
4843 "tip":
4843 "tip":
4844 (tip,
4844 (tip,
4845 [('p', 'patch', None, _('show patch')),
4845 [('p', 'patch', None, _('show patch')),
4846 ('g', 'git', None, _('use git extended diff format')),
4846 ('g', 'git', None, _('use git extended diff format')),
4847 ] + templateopts,
4847 ] + templateopts,
4848 _('[-p] [-g]')),
4848 _('[-p] [-g]')),
4849 "unbundle":
4849 "unbundle":
4850 (unbundle,
4850 (unbundle,
4851 [('u', 'update', None,
4851 [('u', 'update', None,
4852 _('update to new branch head if changesets were unbundled'))],
4852 _('update to new branch head if changesets were unbundled'))],
4853 _('[-u] FILE...')),
4853 _('[-u] FILE...')),
4854 "^update|up|checkout|co":
4854 "^update|up|checkout|co":
4855 (update,
4855 (update,
4856 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4856 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4857 ('c', 'check', None,
4857 ('c', 'check', None,
4858 _('update across branches if no uncommitted changes')),
4858 _('update across branches if no uncommitted changes')),
4859 ('d', 'date', '',
4859 ('d', 'date', '',
4860 _('tipmost revision matching date'), _('DATE')),
4860 _('tipmost revision matching date'), _('DATE')),
4861 ('r', 'rev', '',
4861 ('r', 'rev', '',
4862 _('revision'), _('REV'))],
4862 _('revision'), _('REV'))],
4863 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4863 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4864 "verify": (verify, []),
4864 "verify": (verify, []),
4865 "version": (version_, []),
4865 "version": (version_, []),
4866 }
4866 }
4867
4867
4868 norepo = ("clone init version help debugcommands debugcomplete"
4868 norepo = ("clone init version help debugcommands debugcomplete"
4869 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4869 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4870 " debugknown debuggetbundle debugbundle")
4870 " debugknown debuggetbundle debugbundle")
4871 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4871 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4872 " debugdata debugindex debugindexdot")
4872 " debugdata debugindex debugindexdot")
@@ -1,560 +1,560 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid
11 from node import hex, nullid
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
13 import lock, util, extensions, error, node
13 import lock, util, extensions, error, node
14 import cmdutil, discovery, url
14 import cmdutil, discovery
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
19 def _local(path):
19 def _local(path):
20 path = util.expandpath(url.localpath(path))
20 path = util.expandpath(util.localpath(path))
21 return (os.path.isfile(path) and bundlerepo or localrepo)
21 return (os.path.isfile(path) and bundlerepo or localrepo)
22
22
23 def addbranchrevs(lrepo, repo, branches, revs):
23 def addbranchrevs(lrepo, repo, branches, revs):
24 hashbranch, branches = branches
24 hashbranch, branches = branches
25 if not hashbranch and not branches:
25 if not hashbranch and not branches:
26 return revs or None, revs and revs[0] or None
26 return revs or None, revs and revs[0] or None
27 revs = revs and list(revs) or []
27 revs = revs and list(revs) or []
28 if not repo.capable('branchmap'):
28 if not repo.capable('branchmap'):
29 if branches:
29 if branches:
30 raise util.Abort(_("remote branch lookup not supported"))
30 raise util.Abort(_("remote branch lookup not supported"))
31 revs.append(hashbranch)
31 revs.append(hashbranch)
32 return revs, revs[0]
32 return revs, revs[0]
33 branchmap = repo.branchmap()
33 branchmap = repo.branchmap()
34
34
35 def primary(branch):
35 def primary(branch):
36 if branch == '.':
36 if branch == '.':
37 if not lrepo or not lrepo.local():
37 if not lrepo or not lrepo.local():
38 raise util.Abort(_("dirstate branch not accessible"))
38 raise util.Abort(_("dirstate branch not accessible"))
39 branch = lrepo.dirstate.branch()
39 branch = lrepo.dirstate.branch()
40 if branch in branchmap:
40 if branch in branchmap:
41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
42 return True
42 return True
43 else:
43 else:
44 return False
44 return False
45
45
46 for branch in branches:
46 for branch in branches:
47 if not primary(branch):
47 if not primary(branch):
48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
49 if hashbranch:
49 if hashbranch:
50 if not primary(hashbranch):
50 if not primary(hashbranch):
51 revs.append(hashbranch)
51 revs.append(hashbranch)
52 return revs, revs[0]
52 return revs, revs[0]
53
53
54 def parseurl(path, branches=None):
54 def parseurl(path, branches=None):
55 '''parse url#branch, returning (url, (branch, branches))'''
55 '''parse url#branch, returning (url, (branch, branches))'''
56
56
57 u = url.url(path)
57 u = util.url(path)
58 branch = None
58 branch = None
59 if u.fragment:
59 if u.fragment:
60 branch = u.fragment
60 branch = u.fragment
61 u.fragment = None
61 u.fragment = None
62 return str(u), (branch, branches or [])
62 return str(u), (branch, branches or [])
63
63
64 schemes = {
64 schemes = {
65 'bundle': bundlerepo,
65 'bundle': bundlerepo,
66 'file': _local,
66 'file': _local,
67 'http': httprepo,
67 'http': httprepo,
68 'https': httprepo,
68 'https': httprepo,
69 'ssh': sshrepo,
69 'ssh': sshrepo,
70 'static-http': statichttprepo,
70 'static-http': statichttprepo,
71 }
71 }
72
72
73 def _lookup(path):
73 def _lookup(path):
74 u = url.url(path)
74 u = util.url(path)
75 scheme = u.scheme or 'file'
75 scheme = u.scheme or 'file'
76 thing = schemes.get(scheme) or schemes['file']
76 thing = schemes.get(scheme) or schemes['file']
77 try:
77 try:
78 return thing(path)
78 return thing(path)
79 except TypeError:
79 except TypeError:
80 return thing
80 return thing
81
81
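As an aside on the dispatch just above: _lookup() asks util.url() (previously url.url()) for the path's scheme and uses it to index the schemes table, falling back to local files. A rough standalone approximation, with the standard library's urlsplit standing in for Mercurial's URL class and plain strings standing in for the repository modules:

    try:
        from urllib.parse import urlsplit   # Python 3
    except ImportError:
        from urlparse import urlsplit       # Python 2

    # stand-in strings for the modules named in the real `schemes` table
    handlers = {'file': 'localrepo', 'bundle': 'bundlerepo',
                'http': 'httprepo', 'https': 'httprepo',
                'ssh': 'sshrepo', 'static-http': 'statichttprepo'}

    def lookup_sketch(path):
        scheme = urlsplit(path).scheme or 'file'
        return handlers.get(scheme, handlers['file'])

    print(lookup_sketch('ssh://hg@example.com/repo'))   # -> sshrepo
    print(lookup_sketch('/home/someone/repo'))          # -> localrepo

The real class also copes with Windows drive letters and other non-URL paths, which is why this is only an approximation.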
82 def islocal(repo):
82 def islocal(repo):
83 '''return true if repo or path is local'''
83 '''return true if repo or path is local'''
84 if isinstance(repo, str):
84 if isinstance(repo, str):
85 try:
85 try:
86 return _lookup(repo).islocal(repo)
86 return _lookup(repo).islocal(repo)
87 except AttributeError:
87 except AttributeError:
88 return False
88 return False
89 return repo.local()
89 return repo.local()
90
90
91 def repository(ui, path='', create=False):
91 def repository(ui, path='', create=False):
92 """return a repository object for the specified path"""
92 """return a repository object for the specified path"""
93 repo = _lookup(path).instance(ui, path, create)
93 repo = _lookup(path).instance(ui, path, create)
94 ui = getattr(repo, "ui", ui)
94 ui = getattr(repo, "ui", ui)
95 for name, module in extensions.extensions():
95 for name, module in extensions.extensions():
96 hook = getattr(module, 'reposetup', None)
96 hook = getattr(module, 'reposetup', None)
97 if hook:
97 if hook:
98 hook(ui, repo)
98 hook(ui, repo)
99 return repo
99 return repo
100
100
101 def defaultdest(source):
101 def defaultdest(source):
102 '''return default destination of clone if none is given'''
102 '''return default destination of clone if none is given'''
103 return os.path.basename(os.path.normpath(source))
103 return os.path.basename(os.path.normpath(source))
104
104
105 def share(ui, source, dest=None, update=True):
105 def share(ui, source, dest=None, update=True):
106 '''create a shared repository'''
106 '''create a shared repository'''
107
107
108 if not islocal(source):
108 if not islocal(source):
109 raise util.Abort(_('can only share local repositories'))
109 raise util.Abort(_('can only share local repositories'))
110
110
111 if not dest:
111 if not dest:
112 dest = defaultdest(source)
112 dest = defaultdest(source)
113 else:
113 else:
114 dest = ui.expandpath(dest)
114 dest = ui.expandpath(dest)
115
115
116 if isinstance(source, str):
116 if isinstance(source, str):
117 origsource = ui.expandpath(source)
117 origsource = ui.expandpath(source)
118 source, branches = parseurl(origsource)
118 source, branches = parseurl(origsource)
119 srcrepo = repository(ui, source)
119 srcrepo = repository(ui, source)
120 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
120 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
121 else:
121 else:
122 srcrepo = source
122 srcrepo = source
123 origsource = source = srcrepo.url()
123 origsource = source = srcrepo.url()
124 checkout = None
124 checkout = None
125
125
126 sharedpath = srcrepo.sharedpath # if our source is already sharing
126 sharedpath = srcrepo.sharedpath # if our source is already sharing
127
127
128 root = os.path.realpath(dest)
128 root = os.path.realpath(dest)
129 roothg = os.path.join(root, '.hg')
129 roothg = os.path.join(root, '.hg')
130
130
131 if os.path.exists(roothg):
131 if os.path.exists(roothg):
132 raise util.Abort(_('destination already exists'))
132 raise util.Abort(_('destination already exists'))
133
133
134 if not os.path.isdir(root):
134 if not os.path.isdir(root):
135 os.mkdir(root)
135 os.mkdir(root)
136 util.makedir(roothg, notindexed=True)
136 util.makedir(roothg, notindexed=True)
137
137
138 requirements = ''
138 requirements = ''
139 try:
139 try:
140 requirements = srcrepo.opener('requires').read()
140 requirements = srcrepo.opener('requires').read()
141 except IOError, inst:
141 except IOError, inst:
142 if inst.errno != errno.ENOENT:
142 if inst.errno != errno.ENOENT:
143 raise
143 raise
144
144
145 requirements += 'shared\n'
145 requirements += 'shared\n'
146 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
146 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
147 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
147 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
148
148
149 default = srcrepo.ui.config('paths', 'default')
149 default = srcrepo.ui.config('paths', 'default')
150 if default:
150 if default:
151 f = file(os.path.join(roothg, 'hgrc'), 'w')
151 f = file(os.path.join(roothg, 'hgrc'), 'w')
152 f.write('[paths]\ndefault = %s\n' % default)
152 f.write('[paths]\ndefault = %s\n' % default)
153 f.close()
153 f.close()
154
154
155 r = repository(ui, root)
155 r = repository(ui, root)
156
156
157 if update:
157 if update:
158 r.ui.status(_("updating working directory\n"))
158 r.ui.status(_("updating working directory\n"))
159 if update is not True:
159 if update is not True:
160 checkout = update
160 checkout = update
161 for test in (checkout, 'default', 'tip'):
161 for test in (checkout, 'default', 'tip'):
162 if test is None:
162 if test is None:
163 continue
163 continue
164 try:
164 try:
165 uprev = r.lookup(test)
165 uprev = r.lookup(test)
166 break
166 break
167 except error.RepoLookupError:
167 except error.RepoLookupError:
168 continue
168 continue
169 _update(r, uprev)
169 _update(r, uprev)
170
170
171 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
171 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
172 stream=False, branch=None):
172 stream=False, branch=None):
173 """Make a copy of an existing repository.
173 """Make a copy of an existing repository.
174
174
175 Create a copy of an existing repository in a new directory. The
175 Create a copy of an existing repository in a new directory. The
176 source and destination are URLs, as passed to the repository
176 source and destination are URLs, as passed to the repository
177 function. Returns a pair of repository objects, the source and
177 function. Returns a pair of repository objects, the source and
178 newly created destination.
178 newly created destination.
179
179
180 The location of the source is added to the new repository's
180 The location of the source is added to the new repository's
181 .hg/hgrc file, as the default to be used for future pulls and
181 .hg/hgrc file, as the default to be used for future pulls and
182 pushes.
182 pushes.
183
183
184 If an exception is raised, the partly cloned/updated destination
184 If an exception is raised, the partly cloned/updated destination
185 repository will be deleted.
185 repository will be deleted.
186
186
187 Arguments:
187 Arguments:
188
188
189 source: repository object or URL
189 source: repository object or URL
190
190
191 dest: URL of destination repository to create (defaults to base
191 dest: URL of destination repository to create (defaults to base
192 name of source repository)
192 name of source repository)
193
193
194 pull: always pull from source repository, even in local case
194 pull: always pull from source repository, even in local case
195
195
196 stream: stream raw data uncompressed from repository (fast over
196 stream: stream raw data uncompressed from repository (fast over
197 LAN, slow over WAN)
197 LAN, slow over WAN)
198
198
199 rev: revision to clone up to (implies pull=True)
199 rev: revision to clone up to (implies pull=True)
200
200
201 update: update working directory after clone completes, if
201 update: update working directory after clone completes, if
202 destination is local repository (True means update to default rev,
202 destination is local repository (True means update to default rev,
203 anything else is treated as a revision)
203 anything else is treated as a revision)
204
204
205 branch: branches to clone
205 branch: branches to clone
206 """
206 """
207
207
208 if isinstance(source, str):
208 if isinstance(source, str):
209 origsource = ui.expandpath(source)
209 origsource = ui.expandpath(source)
210 source, branch = parseurl(origsource, branch)
210 source, branch = parseurl(origsource, branch)
211 src_repo = repository(ui, source)
211 src_repo = repository(ui, source)
212 else:
212 else:
213 src_repo = source
213 src_repo = source
214 branch = (None, branch or [])
214 branch = (None, branch or [])
215 origsource = source = src_repo.url()
215 origsource = source = src_repo.url()
216 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
216 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
217
217
218 if dest is None:
218 if dest is None:
219 dest = defaultdest(source)
219 dest = defaultdest(source)
220 ui.status(_("destination directory: %s\n") % dest)
220 ui.status(_("destination directory: %s\n") % dest)
221 else:
221 else:
222 dest = ui.expandpath(dest)
222 dest = ui.expandpath(dest)
223
223
224 dest = url.localpath(dest)
224 dest = util.localpath(dest)
225 source = url.localpath(source)
225 source = util.localpath(source)
226
226
227 if os.path.exists(dest):
227 if os.path.exists(dest):
228 if not os.path.isdir(dest):
228 if not os.path.isdir(dest):
229 raise util.Abort(_("destination '%s' already exists") % dest)
229 raise util.Abort(_("destination '%s' already exists") % dest)
230 elif os.listdir(dest):
230 elif os.listdir(dest):
231 raise util.Abort(_("destination '%s' is not empty") % dest)
231 raise util.Abort(_("destination '%s' is not empty") % dest)
232
232
233 class DirCleanup(object):
233 class DirCleanup(object):
234 def __init__(self, dir_):
234 def __init__(self, dir_):
235 self.rmtree = shutil.rmtree
235 self.rmtree = shutil.rmtree
236 self.dir_ = dir_
236 self.dir_ = dir_
237 def close(self):
237 def close(self):
238 self.dir_ = None
238 self.dir_ = None
239 def cleanup(self):
239 def cleanup(self):
240 if self.dir_:
240 if self.dir_:
241 self.rmtree(self.dir_, True)
241 self.rmtree(self.dir_, True)
242
242
243 src_lock = dest_lock = dir_cleanup = None
243 src_lock = dest_lock = dir_cleanup = None
244 try:
244 try:
245 if islocal(dest):
245 if islocal(dest):
246 dir_cleanup = DirCleanup(dest)
246 dir_cleanup = DirCleanup(dest)
247
247
248 abspath = origsource
248 abspath = origsource
249 copy = False
249 copy = False
250 if src_repo.cancopy() and islocal(dest):
250 if src_repo.cancopy() and islocal(dest):
251 abspath = os.path.abspath(url.localpath(origsource))
251 abspath = os.path.abspath(util.localpath(origsource))
252 copy = not pull and not rev
252 copy = not pull and not rev
253
253
254 if copy:
254 if copy:
255 try:
255 try:
256 # we use a lock here because if we race with commit, we
256 # we use a lock here because if we race with commit, we
257 # can end up with extra data in the cloned revlogs that's
257 # can end up with extra data in the cloned revlogs that's
258 # not pointed to by changesets, thus causing verify to
258 # not pointed to by changesets, thus causing verify to
259 # fail
259 # fail
260 src_lock = src_repo.lock(wait=False)
260 src_lock = src_repo.lock(wait=False)
261 except error.LockError:
261 except error.LockError:
262 copy = False
262 copy = False
263
263
264 if copy:
264 if copy:
265 src_repo.hook('preoutgoing', throw=True, source='clone')
265 src_repo.hook('preoutgoing', throw=True, source='clone')
266 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
266 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
267 if not os.path.exists(dest):
267 if not os.path.exists(dest):
268 os.mkdir(dest)
268 os.mkdir(dest)
269 else:
269 else:
270 # only clean up directories we create ourselves
270 # only clean up directories we create ourselves
271 dir_cleanup.dir_ = hgdir
271 dir_cleanup.dir_ = hgdir
272 try:
272 try:
273 dest_path = hgdir
273 dest_path = hgdir
274 util.makedir(dest_path, notindexed=True)
274 util.makedir(dest_path, notindexed=True)
275 except OSError, inst:
275 except OSError, inst:
276 if inst.errno == errno.EEXIST:
276 if inst.errno == errno.EEXIST:
277 dir_cleanup.close()
277 dir_cleanup.close()
278 raise util.Abort(_("destination '%s' already exists")
278 raise util.Abort(_("destination '%s' already exists")
279 % dest)
279 % dest)
280 raise
280 raise
281
281
282 hardlink = None
282 hardlink = None
283 num = 0
283 num = 0
284 for f in src_repo.store.copylist():
284 for f in src_repo.store.copylist():
285 src = os.path.join(src_repo.sharedpath, f)
285 src = os.path.join(src_repo.sharedpath, f)
286 dst = os.path.join(dest_path, f)
286 dst = os.path.join(dest_path, f)
287 dstbase = os.path.dirname(dst)
287 dstbase = os.path.dirname(dst)
288 if dstbase and not os.path.exists(dstbase):
288 if dstbase and not os.path.exists(dstbase):
289 os.mkdir(dstbase)
289 os.mkdir(dstbase)
290 if os.path.exists(src):
290 if os.path.exists(src):
291 if dst.endswith('data'):
291 if dst.endswith('data'):
292 # lock to avoid premature writing to the target
292 # lock to avoid premature writing to the target
293 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
293 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
294 hardlink, n = util.copyfiles(src, dst, hardlink)
294 hardlink, n = util.copyfiles(src, dst, hardlink)
295 num += n
295 num += n
296 if hardlink:
296 if hardlink:
297 ui.debug("linked %d files\n" % num)
297 ui.debug("linked %d files\n" % num)
298 else:
298 else:
299 ui.debug("copied %d files\n" % num)
299 ui.debug("copied %d files\n" % num)
300
300
301 # we need to re-init the repo after manually copying the data
301 # we need to re-init the repo after manually copying the data
302 # into it
302 # into it
303 dest_repo = repository(ui, dest)
303 dest_repo = repository(ui, dest)
304 src_repo.hook('outgoing', source='clone',
304 src_repo.hook('outgoing', source='clone',
305 node=node.hex(node.nullid))
305 node=node.hex(node.nullid))
306 else:
306 else:
307 try:
307 try:
308 dest_repo = repository(ui, dest, create=True)
308 dest_repo = repository(ui, dest, create=True)
309 except OSError, inst:
309 except OSError, inst:
310 if inst.errno == errno.EEXIST:
310 if inst.errno == errno.EEXIST:
311 dir_cleanup.close()
311 dir_cleanup.close()
312 raise util.Abort(_("destination '%s' already exists")
312 raise util.Abort(_("destination '%s' already exists")
313 % dest)
313 % dest)
314 raise
314 raise
315
315
316 revs = None
316 revs = None
317 if rev:
317 if rev:
318 if 'lookup' not in src_repo.capabilities:
318 if 'lookup' not in src_repo.capabilities:
319 raise util.Abort(_("src repository does not support "
319 raise util.Abort(_("src repository does not support "
320 "revision lookup and so doesn't "
320 "revision lookup and so doesn't "
321 "support clone by revision"))
321 "support clone by revision"))
322 revs = [src_repo.lookup(r) for r in rev]
322 revs = [src_repo.lookup(r) for r in rev]
323 checkout = revs[0]
323 checkout = revs[0]
324 if dest_repo.local():
324 if dest_repo.local():
325 dest_repo.clone(src_repo, heads=revs, stream=stream)
325 dest_repo.clone(src_repo, heads=revs, stream=stream)
326 elif src_repo.local():
326 elif src_repo.local():
327 src_repo.push(dest_repo, revs=revs)
327 src_repo.push(dest_repo, revs=revs)
328 else:
328 else:
329 raise util.Abort(_("clone from remote to remote not supported"))
329 raise util.Abort(_("clone from remote to remote not supported"))
330
330
331 if dir_cleanup:
331 if dir_cleanup:
332 dir_cleanup.close()
332 dir_cleanup.close()
333
333
334 if dest_repo.local():
334 if dest_repo.local():
335 fp = dest_repo.opener("hgrc", "w", text=True)
335 fp = dest_repo.opener("hgrc", "w", text=True)
336 fp.write("[paths]\n")
336 fp.write("[paths]\n")
337 fp.write("default = %s\n" % abspath)
337 fp.write("default = %s\n" % abspath)
338 fp.close()
338 fp.close()
339
339
340 dest_repo.ui.setconfig('paths', 'default', abspath)
340 dest_repo.ui.setconfig('paths', 'default', abspath)
341
341
342 if update:
342 if update:
343 if update is not True:
343 if update is not True:
344 checkout = update
344 checkout = update
345 if src_repo.local():
345 if src_repo.local():
346 checkout = src_repo.lookup(update)
346 checkout = src_repo.lookup(update)
347 for test in (checkout, 'default', 'tip'):
347 for test in (checkout, 'default', 'tip'):
348 if test is None:
348 if test is None:
349 continue
349 continue
350 try:
350 try:
351 uprev = dest_repo.lookup(test)
351 uprev = dest_repo.lookup(test)
352 break
352 break
353 except error.RepoLookupError:
353 except error.RepoLookupError:
354 continue
354 continue
355 bn = dest_repo[uprev].branch()
355 bn = dest_repo[uprev].branch()
356 dest_repo.ui.status(_("updating to branch %s\n") % bn)
356 dest_repo.ui.status(_("updating to branch %s\n") % bn)
357 _update(dest_repo, uprev)
357 _update(dest_repo, uprev)
358
358
359 # clone all bookmarks
359 # clone all bookmarks
360 if dest_repo.local() and src_repo.capable("pushkey"):
360 if dest_repo.local() and src_repo.capable("pushkey"):
361 rb = src_repo.listkeys('bookmarks')
361 rb = src_repo.listkeys('bookmarks')
362 for k, n in rb.iteritems():
362 for k, n in rb.iteritems():
363 try:
363 try:
364 m = dest_repo.lookup(n)
364 m = dest_repo.lookup(n)
365 dest_repo._bookmarks[k] = m
365 dest_repo._bookmarks[k] = m
366 except error.RepoLookupError:
366 except error.RepoLookupError:
367 pass
367 pass
368 if rb:
368 if rb:
369 bookmarks.write(dest_repo)
369 bookmarks.write(dest_repo)
370 elif src_repo.local() and dest_repo.capable("pushkey"):
370 elif src_repo.local() and dest_repo.capable("pushkey"):
371 for k, n in src_repo._bookmarks.iteritems():
371 for k, n in src_repo._bookmarks.iteritems():
372 dest_repo.pushkey('bookmarks', k, '', hex(n))
372 dest_repo.pushkey('bookmarks', k, '', hex(n))
373
373
374 return src_repo, dest_repo
374 return src_repo, dest_repo
375 finally:
375 finally:
376 release(src_lock, dest_lock)
376 release(src_lock, dest_lock)
377 if dir_cleanup is not None:
377 if dir_cleanup is not None:
378 dir_cleanup.cleanup()
378 dir_cleanup.cleanup()
379
379
380 def _showstats(repo, stats):
380 def _showstats(repo, stats):
381 repo.ui.status(_("%d files updated, %d files merged, "
381 repo.ui.status(_("%d files updated, %d files merged, "
382 "%d files removed, %d files unresolved\n") % stats)
382 "%d files removed, %d files unresolved\n") % stats)
383
383
384 def update(repo, node):
384 def update(repo, node):
385 """update the working directory to node, merging linear changes"""
385 """update the working directory to node, merging linear changes"""
386 stats = mergemod.update(repo, node, False, False, None)
386 stats = mergemod.update(repo, node, False, False, None)
387 _showstats(repo, stats)
387 _showstats(repo, stats)
388 if stats[3]:
388 if stats[3]:
389 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
389 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
390 return stats[3] > 0
390 return stats[3] > 0
391
391
392 # naming conflict in clone()
392 # naming conflict in clone()
393 _update = update
393 _update = update
394
394
395 def clean(repo, node, show_stats=True):
395 def clean(repo, node, show_stats=True):
396 """forcibly switch the working directory to node, clobbering changes"""
396 """forcibly switch the working directory to node, clobbering changes"""
397 stats = mergemod.update(repo, node, False, True, None)
397 stats = mergemod.update(repo, node, False, True, None)
398 if show_stats:
398 if show_stats:
399 _showstats(repo, stats)
399 _showstats(repo, stats)
400 return stats[3] > 0
400 return stats[3] > 0
401
401
402 def merge(repo, node, force=None, remind=True):
402 def merge(repo, node, force=None, remind=True):
403 """Branch merge with node, resolving changes. Return true if any
403 """Branch merge with node, resolving changes. Return true if any
404 unresolved conflicts."""
404 unresolved conflicts."""
405 stats = mergemod.update(repo, node, True, force, False)
405 stats = mergemod.update(repo, node, True, force, False)
406 _showstats(repo, stats)
406 _showstats(repo, stats)
407 if stats[3]:
407 if stats[3]:
408 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
408 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
409 "or 'hg update -C .' to abandon\n"))
409 "or 'hg update -C .' to abandon\n"))
410 elif remind:
410 elif remind:
411 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
411 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
412 return stats[3] > 0
412 return stats[3] > 0
413
413
414 def _incoming(displaychlist, subreporecurse, ui, repo, source,
414 def _incoming(displaychlist, subreporecurse, ui, repo, source,
415 opts, buffered=False):
415 opts, buffered=False):
416 """
416 """
417 Helper for incoming / gincoming.
417 Helper for incoming / gincoming.
418 displaychlist gets called with
418 displaychlist gets called with
419 (remoterepo, incomingchangesetlist, displayer) parameters,
419 (remoterepo, incomingchangesetlist, displayer) parameters,
420 and is supposed to contain only code that can't be unified.
420 and is supposed to contain only code that can't be unified.
421 """
421 """
422 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
422 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
423 other = repository(remoteui(repo, opts), source)
423 other = repository(remoteui(repo, opts), source)
424 ui.status(_('comparing with %s\n') % url.hidepassword(source))
424 ui.status(_('comparing with %s\n') % util.hidepassword(source))
425 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
425 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
426
426
427 if revs:
427 if revs:
428 revs = [other.lookup(rev) for rev in revs]
428 revs = [other.lookup(rev) for rev in revs]
429 other, common, anyinc, bundle = bundlerepo.getremotechanges(ui, repo, other,
429 other, common, anyinc, bundle = bundlerepo.getremotechanges(ui, repo, other,
430 revs, opts["bundle"], opts["force"])
430 revs, opts["bundle"], opts["force"])
431 if not anyinc:
431 if not anyinc:
432 ui.status(_("no changes found\n"))
432 ui.status(_("no changes found\n"))
433 return subreporecurse()
433 return subreporecurse()
434
434
435 try:
435 try:
436 chlist = other.changelog.findmissing(common, revs)
436 chlist = other.changelog.findmissing(common, revs)
437 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
437 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
438
438
439 # XXX once graphlog extension makes it into core,
439 # XXX once graphlog extension makes it into core,
440 # should be replaced by an if graph/else
440 # should be replaced by an if graph/else
441 displaychlist(other, chlist, displayer)
441 displaychlist(other, chlist, displayer)
442
442
443 displayer.close()
443 displayer.close()
444 finally:
444 finally:
445 if hasattr(other, 'close'):
445 if hasattr(other, 'close'):
446 other.close()
446 other.close()
447 if bundle:
447 if bundle:
448 os.unlink(bundle)
448 os.unlink(bundle)
449 subreporecurse()
449 subreporecurse()
450 return 0 # exit code is zero since we found incoming changes
450 return 0 # exit code is zero since we found incoming changes
451
451
452 def incoming(ui, repo, source, opts):
452 def incoming(ui, repo, source, opts):
453 def subreporecurse():
453 def subreporecurse():
454 ret = 1
454 ret = 1
455 if opts.get('subrepos'):
455 if opts.get('subrepos'):
456 ctx = repo[None]
456 ctx = repo[None]
457 for subpath in sorted(ctx.substate):
457 for subpath in sorted(ctx.substate):
458 sub = ctx.sub(subpath)
458 sub = ctx.sub(subpath)
459 ret = min(ret, sub.incoming(ui, source, opts))
459 ret = min(ret, sub.incoming(ui, source, opts))
460 return ret
460 return ret
461
461
462 def display(other, chlist, displayer):
462 def display(other, chlist, displayer):
463 limit = cmdutil.loglimit(opts)
463 limit = cmdutil.loglimit(opts)
464 if opts.get('newest_first'):
464 if opts.get('newest_first'):
465 chlist.reverse()
465 chlist.reverse()
466 count = 0
466 count = 0
467 for n in chlist:
467 for n in chlist:
468 if limit is not None and count >= limit:
468 if limit is not None and count >= limit:
469 break
469 break
470 parents = [p for p in other.changelog.parents(n) if p != nullid]
470 parents = [p for p in other.changelog.parents(n) if p != nullid]
471 if opts.get('no_merges') and len(parents) == 2:
471 if opts.get('no_merges') and len(parents) == 2:
472 continue
472 continue
473 count += 1
473 count += 1
474 displayer.show(other[n])
474 displayer.show(other[n])
475 return _incoming(display, subreporecurse, ui, repo, source, opts)
475 return _incoming(display, subreporecurse, ui, repo, source, opts)
476
476
477 def _outgoing(ui, repo, dest, opts):
477 def _outgoing(ui, repo, dest, opts):
478 dest = ui.expandpath(dest or 'default-push', dest or 'default')
478 dest = ui.expandpath(dest or 'default-push', dest or 'default')
479 dest, branches = parseurl(dest, opts.get('branch'))
479 dest, branches = parseurl(dest, opts.get('branch'))
480 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
480 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
481 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
481 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
482 if revs:
482 if revs:
483 revs = [repo.lookup(rev) for rev in revs]
483 revs = [repo.lookup(rev) for rev in revs]
484
484
485 other = repository(remoteui(repo, opts), dest)
485 other = repository(remoteui(repo, opts), dest)
486 inc = discovery.findcommonincoming(repo, other, force=opts.get('force'))
486 inc = discovery.findcommonincoming(repo, other, force=opts.get('force'))
487 common, _anyinc, _heads = inc
487 common, _anyinc, _heads = inc
488 o = repo.changelog.findmissing(common, revs)
488 o = repo.changelog.findmissing(common, revs)
489 if not o:
489 if not o:
490 ui.status(_("no changes found\n"))
490 ui.status(_("no changes found\n"))
491 return None
491 return None
492 return o
492 return o
493
493
494 def outgoing(ui, repo, dest, opts):
494 def outgoing(ui, repo, dest, opts):
495 def recurse():
495 def recurse():
496 ret = 1
496 ret = 1
497 if opts.get('subrepos'):
497 if opts.get('subrepos'):
498 ctx = repo[None]
498 ctx = repo[None]
499 for subpath in sorted(ctx.substate):
499 for subpath in sorted(ctx.substate):
500 sub = ctx.sub(subpath)
500 sub = ctx.sub(subpath)
501 ret = min(ret, sub.outgoing(ui, dest, opts))
501 ret = min(ret, sub.outgoing(ui, dest, opts))
502 return ret
502 return ret
503
503
504 limit = cmdutil.loglimit(opts)
504 limit = cmdutil.loglimit(opts)
505 o = _outgoing(ui, repo, dest, opts)
505 o = _outgoing(ui, repo, dest, opts)
506 if o is None:
506 if o is None:
507 return recurse()
507 return recurse()
508
508
509 if opts.get('newest_first'):
509 if opts.get('newest_first'):
510 o.reverse()
510 o.reverse()
511 displayer = cmdutil.show_changeset(ui, repo, opts)
511 displayer = cmdutil.show_changeset(ui, repo, opts)
512 count = 0
512 count = 0
513 for n in o:
513 for n in o:
514 if limit is not None and count >= limit:
514 if limit is not None and count >= limit:
515 break
515 break
516 parents = [p for p in repo.changelog.parents(n) if p != nullid]
516 parents = [p for p in repo.changelog.parents(n) if p != nullid]
517 if opts.get('no_merges') and len(parents) == 2:
517 if opts.get('no_merges') and len(parents) == 2:
518 continue
518 continue
519 count += 1
519 count += 1
520 displayer.show(repo[n])
520 displayer.show(repo[n])
521 displayer.close()
521 displayer.close()
522 recurse()
522 recurse()
523 return 0 # exit code is zero since we found outgoing changes
523 return 0 # exit code is zero since we found outgoing changes
524
524
525 def revert(repo, node, choose):
525 def revert(repo, node, choose):
526 """revert changes to revision in node without updating dirstate"""
526 """revert changes to revision in node without updating dirstate"""
527 return mergemod.update(repo, node, False, True, choose)[3] > 0
527 return mergemod.update(repo, node, False, True, choose)[3] > 0
528
528
529 def verify(repo):
529 def verify(repo):
530 """verify the consistency of a repository"""
530 """verify the consistency of a repository"""
531 return verifymod.verify(repo)
531 return verifymod.verify(repo)
532
532
533 def remoteui(src, opts):
533 def remoteui(src, opts):
534 'build a remote ui from ui or repo and opts'
534 'build a remote ui from ui or repo and opts'
535 if hasattr(src, 'baseui'): # looks like a repository
535 if hasattr(src, 'baseui'): # looks like a repository
536 dst = src.baseui.copy() # drop repo-specific config
536 dst = src.baseui.copy() # drop repo-specific config
537 src = src.ui # copy target options from repo
537 src = src.ui # copy target options from repo
538 else: # assume it's a global ui object
538 else: # assume it's a global ui object
539 dst = src.copy() # keep all global options
539 dst = src.copy() # keep all global options
540
540
541 # copy ssh-specific options
541 # copy ssh-specific options
542 for o in 'ssh', 'remotecmd':
542 for o in 'ssh', 'remotecmd':
543 v = opts.get(o) or src.config('ui', o)
543 v = opts.get(o) or src.config('ui', o)
544 if v:
544 if v:
545 dst.setconfig("ui", o, v)
545 dst.setconfig("ui", o, v)
546
546
547 # copy bundle-specific options
547 # copy bundle-specific options
548 r = src.config('bundle', 'mainreporoot')
548 r = src.config('bundle', 'mainreporoot')
549 if r:
549 if r:
550 dst.setconfig('bundle', 'mainreporoot', r)
550 dst.setconfig('bundle', 'mainreporoot', r)
551
551
552 # copy selected local settings to the remote ui
552 # copy selected local settings to the remote ui
553 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
553 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
554 for key, val in src.configitems(sect):
554 for key, val in src.configitems(sect):
555 dst.setconfig(sect, key, val)
555 dst.setconfig(sect, key, val)
556 v = src.config('web', 'cacerts')
556 v = src.config('web', 'cacerts')
557 if v:
557 if v:
558 dst.setconfig('web', 'cacerts', util.expandpath(v))
558 dst.setconfig('web', 'cacerts', util.expandpath(v))
559
559
560 return dst
560 return dst
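The pattern through the rest of hg.py is mechanical: url.url(), url.localpath() and url.hidepassword() become util.url(), util.localpath() and util.hidepassword(), and the `import url` at the top disappears, so importing hg.py no longer has to load the url module at all -- the startup-time win this changeset is after. The behaviour callers see is unchanged; for instance, the password masking used in the "comparing with %s" messages can be approximated in isolation like this (util.hidepassword itself goes through the util.url class rather than a regex, so this is only a sketch):

    import re

    def hidepassword_sketch(u):
        """return u with any password component replaced by '***'"""
        return re.sub(r'(://[^:/@]+):[^@/]+@', r'\1:***@', u)

    print(hidepassword_sketch('https://user:secret@example.com/repo'))
    # -> https://user:***@example.com/repo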
@@ -1,371 +1,371 b''
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os, re, time
9 import os, re, time
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import ui, hg, scmutil, util, templater
11 from mercurial import ui, hg, scmutil, util, templater
12 from mercurial import error, encoding, url
12 from mercurial import error, encoding
13 from common import ErrorResponse, get_mtime, staticfile, paritygen, \
13 from common import ErrorResponse, get_mtime, staticfile, paritygen, \
14 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
15 from hgweb_mod import hgweb
15 from hgweb_mod import hgweb
16 from request import wsgirequest
16 from request import wsgirequest
17 import webutil
17 import webutil
18
18
19 def cleannames(items):
19 def cleannames(items):
20 return [(util.pconvert(name).strip('/'), path) for name, path in items]
20 return [(util.pconvert(name).strip('/'), path) for name, path in items]
21
21
22 def findrepos(paths):
22 def findrepos(paths):
23 repos = []
23 repos = []
24 for prefix, root in cleannames(paths):
24 for prefix, root in cleannames(paths):
25 roothead, roottail = os.path.split(root)
25 roothead, roottail = os.path.split(root)
26 # "foo = /bar/*" makes every subrepo of /bar/ to be
26 # "foo = /bar/*" makes every subrepo of /bar/ to be
27 # mounted as foo/subrepo
27 # mounted as foo/subrepo
28 # and "foo = /bar/**" also recurses into the subdirectories,
28 # and "foo = /bar/**" also recurses into the subdirectories,
29 # remember to use it without working dir.
29 # remember to use it without working dir.
30 try:
30 try:
31 recurse = {'*': False, '**': True}[roottail]
31 recurse = {'*': False, '**': True}[roottail]
32 except KeyError:
32 except KeyError:
33 repos.append((prefix, root))
33 repos.append((prefix, root))
34 continue
34 continue
35 roothead = os.path.normpath(os.path.abspath(roothead))
35 roothead = os.path.normpath(os.path.abspath(roothead))
36 paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse)
36 paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse)
37 repos.extend(urlrepos(prefix, roothead, paths))
37 repos.extend(urlrepos(prefix, roothead, paths))
38 return repos
38 return repos
39
39
40 def urlrepos(prefix, roothead, paths):
40 def urlrepos(prefix, roothead, paths):
41 """yield url paths and filesystem paths from a list of repo paths
41 """yield url paths and filesystem paths from a list of repo paths
42
42
43 >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq]
43 >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq]
44 >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
44 >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
45 [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
45 [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
46 >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
46 >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
47 [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
47 [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
48 """
48 """
49 for path in paths:
49 for path in paths:
50 path = os.path.normpath(path)
50 path = os.path.normpath(path)
51 yield (prefix + '/' +
51 yield (prefix + '/' +
52 util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path
52 util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path
53
53
54 class hgwebdir(object):
54 class hgwebdir(object):
55 refreshinterval = 20
55 refreshinterval = 20
56
56
57 def __init__(self, conf, baseui=None):
57 def __init__(self, conf, baseui=None):
58 self.conf = conf
58 self.conf = conf
59 self.baseui = baseui
59 self.baseui = baseui
60 self.lastrefresh = 0
60 self.lastrefresh = 0
61 self.motd = None
61 self.motd = None
62 self.refresh()
62 self.refresh()
63
63
64 def refresh(self):
64 def refresh(self):
65 if self.lastrefresh + self.refreshinterval > time.time():
65 if self.lastrefresh + self.refreshinterval > time.time():
66 return
66 return
67
67
68 if self.baseui:
68 if self.baseui:
69 u = self.baseui.copy()
69 u = self.baseui.copy()
70 else:
70 else:
71 u = ui.ui()
71 u = ui.ui()
72 u.setconfig('ui', 'report_untrusted', 'off')
72 u.setconfig('ui', 'report_untrusted', 'off')
73 u.setconfig('ui', 'interactive', 'off')
73 u.setconfig('ui', 'interactive', 'off')
74
74
75 if not isinstance(self.conf, (dict, list, tuple)):
75 if not isinstance(self.conf, (dict, list, tuple)):
76 map = {'paths': 'hgweb-paths'}
76 map = {'paths': 'hgweb-paths'}
77 if not os.path.exists(self.conf):
77 if not os.path.exists(self.conf):
78 raise util.Abort(_('config file %s not found!') % self.conf)
78 raise util.Abort(_('config file %s not found!') % self.conf)
79 u.readconfig(self.conf, remap=map, trust=True)
79 u.readconfig(self.conf, remap=map, trust=True)
80 paths = []
80 paths = []
81 for name, ignored in u.configitems('hgweb-paths'):
81 for name, ignored in u.configitems('hgweb-paths'):
82 for path in u.configlist('hgweb-paths', name):
82 for path in u.configlist('hgweb-paths', name):
83 paths.append((name, path))
83 paths.append((name, path))
84 elif isinstance(self.conf, (list, tuple)):
84 elif isinstance(self.conf, (list, tuple)):
85 paths = self.conf
85 paths = self.conf
86 elif isinstance(self.conf, dict):
86 elif isinstance(self.conf, dict):
87 paths = self.conf.items()
87 paths = self.conf.items()
88
88
89 repos = findrepos(paths)
89 repos = findrepos(paths)
90 for prefix, root in u.configitems('collections'):
90 for prefix, root in u.configitems('collections'):
91 prefix = util.pconvert(prefix)
91 prefix = util.pconvert(prefix)
92 for path in scmutil.walkrepos(root, followsym=True):
92 for path in scmutil.walkrepos(root, followsym=True):
93 repo = os.path.normpath(path)
93 repo = os.path.normpath(path)
94 name = util.pconvert(repo)
94 name = util.pconvert(repo)
95 if name.startswith(prefix):
95 if name.startswith(prefix):
96 name = name[len(prefix):]
96 name = name[len(prefix):]
97 repos.append((name.lstrip('/'), repo))
97 repos.append((name.lstrip('/'), repo))
98
98
99 self.repos = repos
99 self.repos = repos
100 self.ui = u
100 self.ui = u
101 encoding.encoding = self.ui.config('web', 'encoding',
101 encoding.encoding = self.ui.config('web', 'encoding',
102 encoding.encoding)
102 encoding.encoding)
103 self.style = self.ui.config('web', 'style', 'paper')
103 self.style = self.ui.config('web', 'style', 'paper')
104 self.templatepath = self.ui.config('web', 'templates', None)
104 self.templatepath = self.ui.config('web', 'templates', None)
105 self.stripecount = self.ui.config('web', 'stripes', 1)
105 self.stripecount = self.ui.config('web', 'stripes', 1)
106 if self.stripecount:
106 if self.stripecount:
107 self.stripecount = int(self.stripecount)
107 self.stripecount = int(self.stripecount)
108 self._baseurl = self.ui.config('web', 'baseurl')
108 self._baseurl = self.ui.config('web', 'baseurl')
109 self.lastrefresh = time.time()
109 self.lastrefresh = time.time()
110
110
111 def run(self):
111 def run(self):
112 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
112 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
113 raise RuntimeError("This function is only intended to be "
113 raise RuntimeError("This function is only intended to be "
114 "called while running as a CGI script.")
114 "called while running as a CGI script.")
115 import mercurial.hgweb.wsgicgi as wsgicgi
115 import mercurial.hgweb.wsgicgi as wsgicgi
116 wsgicgi.launch(self)
116 wsgicgi.launch(self)
117
117
118 def __call__(self, env, respond):
118 def __call__(self, env, respond):
119 req = wsgirequest(env, respond)
119 req = wsgirequest(env, respond)
120 return self.run_wsgi(req)
120 return self.run_wsgi(req)
121
121
122 def read_allowed(self, ui, req):
122 def read_allowed(self, ui, req):
123 """Check allow_read and deny_read config options of a repo's ui object
123 """Check allow_read and deny_read config options of a repo's ui object
124 to determine user permissions. By default, with neither option set (or
124 to determine user permissions. By default, with neither option set (or
125 both empty), allow all users to read the repo. There are two ways a
125 both empty), allow all users to read the repo. There are two ways a
126 user can be denied read access: (1) deny_read is not empty, and the
126 user can be denied read access: (1) deny_read is not empty, and the
127 user is unauthenticated or deny_read contains user (or *), and (2)
127 user is unauthenticated or deny_read contains user (or *), and (2)
128 allow_read is not empty and the user is not in allow_read. Return True
128 allow_read is not empty and the user is not in allow_read. Return True
129 if user is allowed to read the repo, else return False."""
129 if user is allowed to read the repo, else return False."""
130
130
131 user = req.env.get('REMOTE_USER')
131 user = req.env.get('REMOTE_USER')
132
132
133 deny_read = ui.configlist('web', 'deny_read', untrusted=True)
133 deny_read = ui.configlist('web', 'deny_read', untrusted=True)
134 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
134 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
135 return False
135 return False
136
136
137 allow_read = ui.configlist('web', 'allow_read', untrusted=True)
137 allow_read = ui.configlist('web', 'allow_read', untrusted=True)
138 # by default, allow reading if no allow_read option has been set
138 # by default, allow reading if no allow_read option has been set
139 if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
139 if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
140 return True
140 return True
141
141
142 return False
142 return False
143
143
144 def run_wsgi(self, req):
144 def run_wsgi(self, req):
145 try:
145 try:
146 try:
146 try:
147 self.refresh()
147 self.refresh()
148
148
149 virtual = req.env.get("PATH_INFO", "").strip('/')
149 virtual = req.env.get("PATH_INFO", "").strip('/')
150 tmpl = self.templater(req)
150 tmpl = self.templater(req)
151 ctype = tmpl('mimetype', encoding=encoding.encoding)
151 ctype = tmpl('mimetype', encoding=encoding.encoding)
152 ctype = templater.stringify(ctype)
152 ctype = templater.stringify(ctype)
153
153
154 # a static file
154 # a static file
155 if virtual.startswith('static/') or 'static' in req.form:
155 if virtual.startswith('static/') or 'static' in req.form:
156 if virtual.startswith('static/'):
156 if virtual.startswith('static/'):
157 fname = virtual[7:]
157 fname = virtual[7:]
158 else:
158 else:
159 fname = req.form['static'][0]
159 fname = req.form['static'][0]
160 static = templater.templatepath('static')
160 static = templater.templatepath('static')
161 return (staticfile(static, fname, req),)
161 return (staticfile(static, fname, req),)
162
162
163 # top-level index
163 # top-level index
164 elif not virtual:
164 elif not virtual:
165 req.respond(HTTP_OK, ctype)
165 req.respond(HTTP_OK, ctype)
166 return self.makeindex(req, tmpl)
166 return self.makeindex(req, tmpl)
167
167
168 # nested indexes and hgwebs
168 # nested indexes and hgwebs
169
169
170 repos = dict(self.repos)
170 repos = dict(self.repos)
171 virtualrepo = virtual
171 virtualrepo = virtual
172 while virtualrepo:
172 while virtualrepo:
173 real = repos.get(virtualrepo)
173 real = repos.get(virtualrepo)
174 if real:
174 if real:
175 req.env['REPO_NAME'] = virtualrepo
175 req.env['REPO_NAME'] = virtualrepo
176 try:
176 try:
177 repo = hg.repository(self.ui, real)
177 repo = hg.repository(self.ui, real)
178 return hgweb(repo).run_wsgi(req)
178 return hgweb(repo).run_wsgi(req)
179 except IOError, inst:
179 except IOError, inst:
180 msg = inst.strerror
180 msg = inst.strerror
181 raise ErrorResponse(HTTP_SERVER_ERROR, msg)
181 raise ErrorResponse(HTTP_SERVER_ERROR, msg)
182 except error.RepoError, inst:
182 except error.RepoError, inst:
183 raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
183 raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
184
184
185 up = virtualrepo.rfind('/')
185 up = virtualrepo.rfind('/')
186 if up < 0:
186 if up < 0:
187 break
187 break
188 virtualrepo = virtualrepo[:up]
188 virtualrepo = virtualrepo[:up]
189
189
190 # browse subdirectories
190 # browse subdirectories
191 subdir = virtual + '/'
191 subdir = virtual + '/'
192 if [r for r in repos if r.startswith(subdir)]:
192 if [r for r in repos if r.startswith(subdir)]:
193 req.respond(HTTP_OK, ctype)
193 req.respond(HTTP_OK, ctype)
194 return self.makeindex(req, tmpl, subdir)
194 return self.makeindex(req, tmpl, subdir)
195
195
196 # prefixes not found
196 # prefixes not found
197 req.respond(HTTP_NOT_FOUND, ctype)
197 req.respond(HTTP_NOT_FOUND, ctype)
198 return tmpl("notfound", repo=virtual)
198 return tmpl("notfound", repo=virtual)
199
199
200 except ErrorResponse, err:
200 except ErrorResponse, err:
201 req.respond(err, ctype)
201 req.respond(err, ctype)
202 return tmpl('error', error=err.message or '')
202 return tmpl('error', error=err.message or '')
203 finally:
203 finally:
204 tmpl = None
204 tmpl = None
205
205
206 def makeindex(self, req, tmpl, subdir=""):
206 def makeindex(self, req, tmpl, subdir=""):
207
207
208 def archivelist(ui, nodeid, url):
208 def archivelist(ui, nodeid, url):
209 allowed = ui.configlist("web", "allow_archive", untrusted=True)
209 allowed = ui.configlist("web", "allow_archive", untrusted=True)
210 archives = []
210 archives = []
211 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
211 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
212 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
212 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
213 untrusted=True):
213 untrusted=True):
214 archives.append({"type" : i[0], "extension": i[1],
214 archives.append({"type" : i[0], "extension": i[1],
215 "node": nodeid, "url": url})
215 "node": nodeid, "url": url})
216 return archives
216 return archives
217
217
218 def rawentries(subdir="", **map):
218 def rawentries(subdir="", **map):
219
219
220 descend = self.ui.configbool('web', 'descend', True)
220 descend = self.ui.configbool('web', 'descend', True)
221 for name, path in self.repos:
221 for name, path in self.repos:
222
222
223 if not name.startswith(subdir):
223 if not name.startswith(subdir):
224 continue
224 continue
225 name = name[len(subdir):]
225 name = name[len(subdir):]
226 if not descend and '/' in name:
226 if not descend and '/' in name:
227 continue
227 continue
228
228
229 u = self.ui.copy()
229 u = self.ui.copy()
230 try:
230 try:
231 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
231 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
232 except Exception, e:
232 except Exception, e:
233 u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
233 u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
234 continue
234 continue
235 def get(section, name, default=None):
235 def get(section, name, default=None):
236 return u.config(section, name, default, untrusted=True)
236 return u.config(section, name, default, untrusted=True)
237
237
238 if u.configbool("web", "hidden", untrusted=True):
238 if u.configbool("web", "hidden", untrusted=True):
239 continue
239 continue
240
240
241 if not self.read_allowed(u, req):
241 if not self.read_allowed(u, req):
242 continue
242 continue
243
243
244 parts = [name]
244 parts = [name]
245 if 'PATH_INFO' in req.env:
245 if 'PATH_INFO' in req.env:
246 parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
246 parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
247 if req.env['SCRIPT_NAME']:
247 if req.env['SCRIPT_NAME']:
248 parts.insert(0, req.env['SCRIPT_NAME'])
248 parts.insert(0, req.env['SCRIPT_NAME'])
249 url = re.sub(r'/+', '/', '/'.join(parts) + '/')
249 url = re.sub(r'/+', '/', '/'.join(parts) + '/')
250
250
251 # update time with local timezone
251 # update time with local timezone
252 try:
252 try:
253 r = hg.repository(self.ui, path)
253 r = hg.repository(self.ui, path)
254 except IOError:
254 except IOError:
255 u.warn(_('error accessing repository at %s\n') % path)
255 u.warn(_('error accessing repository at %s\n') % path)
256 continue
256 continue
257 except error.RepoError:
257 except error.RepoError:
258 u.warn(_('error accessing repository at %s\n') % path)
258 u.warn(_('error accessing repository at %s\n') % path)
259 continue
259 continue
260 try:
260 try:
261 d = (get_mtime(r.spath), util.makedate()[1])
261 d = (get_mtime(r.spath), util.makedate()[1])
262 except OSError:
262 except OSError:
263 continue
263 continue
264
264
265 contact = get_contact(get)
265 contact = get_contact(get)
266 description = get("web", "description", "")
266 description = get("web", "description", "")
267 name = get("web", "name", name)
267 name = get("web", "name", name)
268 row = dict(contact=contact or "unknown",
268 row = dict(contact=contact or "unknown",
269 contact_sort=contact.upper() or "unknown",
269 contact_sort=contact.upper() or "unknown",
270 name=name,
270 name=name,
271 name_sort=name,
271 name_sort=name,
272 url=url,
272 url=url,
273 description=description or "unknown",
273 description=description or "unknown",
274 description_sort=description.upper() or "unknown",
274 description_sort=description.upper() or "unknown",
275 lastchange=d,
275 lastchange=d,
276 lastchange_sort=d[1]-d[0],
276 lastchange_sort=d[1]-d[0],
277 archives=archivelist(u, "tip", url))
277 archives=archivelist(u, "tip", url))
278 yield row
278 yield row
279
279
280 sortdefault = None, False
280 sortdefault = None, False
281 def entries(sortcolumn="", descending=False, subdir="", **map):
281 def entries(sortcolumn="", descending=False, subdir="", **map):
282 rows = rawentries(subdir=subdir, **map)
282 rows = rawentries(subdir=subdir, **map)
283
283
284 if sortcolumn and sortdefault != (sortcolumn, descending):
284 if sortcolumn and sortdefault != (sortcolumn, descending):
285 sortkey = '%s_sort' % sortcolumn
285 sortkey = '%s_sort' % sortcolumn
286 rows = sorted(rows, key=lambda x: x[sortkey],
286 rows = sorted(rows, key=lambda x: x[sortkey],
287 reverse=descending)
287 reverse=descending)
288 for row, parity in zip(rows, paritygen(self.stripecount)):
288 for row, parity in zip(rows, paritygen(self.stripecount)):
289 row['parity'] = parity
289 row['parity'] = parity
290 yield row
290 yield row
291
291
292 self.refresh()
292 self.refresh()
293 sortable = ["name", "description", "contact", "lastchange"]
293 sortable = ["name", "description", "contact", "lastchange"]
294 sortcolumn, descending = sortdefault
294 sortcolumn, descending = sortdefault
295 if 'sort' in req.form:
295 if 'sort' in req.form:
296 sortcolumn = req.form['sort'][0]
296 sortcolumn = req.form['sort'][0]
297 descending = sortcolumn.startswith('-')
297 descending = sortcolumn.startswith('-')
298 if descending:
298 if descending:
299 sortcolumn = sortcolumn[1:]
299 sortcolumn = sortcolumn[1:]
300 if sortcolumn not in sortable:
300 if sortcolumn not in sortable:
301 sortcolumn = ""
301 sortcolumn = ""
302
302
303 sort = [("sort_%s" % column,
303 sort = [("sort_%s" % column,
304 "%s%s" % ((not descending and column == sortcolumn)
304 "%s%s" % ((not descending and column == sortcolumn)
305 and "-" or "", column))
305 and "-" or "", column))
306 for column in sortable]
306 for column in sortable]
307
307
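# Illustrative sketch (not hgwebdir code): the 'sort' form value uses a
# leading '-' to request descending order, and the template receives one
# toggle link per sortable column (clicking the currently ascending column
# flips it to descending). Standalone, with hypothetical names:
def _parse_sort(value, sortable):
    descending = value.startswith('-')
    column = value[1:] if descending else value
    if column not in sortable:
        column = ''
    return column, descending

def _toggle_links(sortable, column, descending):
    return dict(('sort_%s' % c,
                 ('-' if (not descending and c == column) else '') + c)
                for c in sortable)

# e.g. _parse_sort('-lastchange', ['name', 'lastchange']) -> ('lastchange', True)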
308 self.refresh()
308 self.refresh()
309 self.updatereqenv(req.env)
309 self.updatereqenv(req.env)
310
310
311 return tmpl("index", entries=entries, subdir=subdir,
311 return tmpl("index", entries=entries, subdir=subdir,
312 sortcolumn=sortcolumn, descending=descending,
312 sortcolumn=sortcolumn, descending=descending,
313 **dict(sort))
313 **dict(sort))
314
314
315 def templater(self, req):
315 def templater(self, req):
316
316
317 def header(**map):
317 def header(**map):
318 yield tmpl('header', encoding=encoding.encoding, **map)
318 yield tmpl('header', encoding=encoding.encoding, **map)
319
319
320 def footer(**map):
320 def footer(**map):
321 yield tmpl("footer", **map)
321 yield tmpl("footer", **map)
322
322
323 def motd(**map):
323 def motd(**map):
324 if self.motd is not None:
324 if self.motd is not None:
325 yield self.motd
325 yield self.motd
326 else:
326 else:
327 yield config('web', 'motd', '')
327 yield config('web', 'motd', '')
328
328
329 def config(section, name, default=None, untrusted=True):
329 def config(section, name, default=None, untrusted=True):
330 return self.ui.config(section, name, default, untrusted)
330 return self.ui.config(section, name, default, untrusted)
331
331
332 self.updatereqenv(req.env)
332 self.updatereqenv(req.env)
333
333
334 url = req.env.get('SCRIPT_NAME', '')
334 url = req.env.get('SCRIPT_NAME', '')
335 if not url.endswith('/'):
335 if not url.endswith('/'):
336 url += '/'
336 url += '/'
337
337
338 vars = {}
338 vars = {}
339 styles = (
339 styles = (
340 req.form.get('style', [None])[0],
340 req.form.get('style', [None])[0],
341 config('web', 'style'),
341 config('web', 'style'),
342 'paper'
342 'paper'
343 )
343 )
344 style, mapfile = templater.stylemap(styles, self.templatepath)
344 style, mapfile = templater.stylemap(styles, self.templatepath)
345 if style == styles[0]:
345 if style == styles[0]:
346 vars['style'] = style
346 vars['style'] = style
347
347
348 start = url[-1] == '?' and '&' or '?'
348 start = url[-1] == '?' and '&' or '?'
349 sessionvars = webutil.sessionvars(vars, start)
349 sessionvars = webutil.sessionvars(vars, start)
350 logourl = config('web', 'logourl', 'http://mercurial.selenic.com/')
350 logourl = config('web', 'logourl', 'http://mercurial.selenic.com/')
351 staticurl = config('web', 'staticurl') or url + 'static/'
351 staticurl = config('web', 'staticurl') or url + 'static/'
352 if not staticurl.endswith('/'):
352 if not staticurl.endswith('/'):
353 staticurl += '/'
353 staticurl += '/'
354
354
355 tmpl = templater.templater(mapfile,
355 tmpl = templater.templater(mapfile,
356 defaults={"header": header,
356 defaults={"header": header,
357 "footer": footer,
357 "footer": footer,
358 "motd": motd,
358 "motd": motd,
359 "url": url,
359 "url": url,
360 "logourl": logourl,
360 "logourl": logourl,
361 "staticurl": staticurl,
361 "staticurl": staticurl,
362 "sessionvars": sessionvars})
362 "sessionvars": sessionvars})
363 return tmpl
363 return tmpl
364
364
365 def updatereqenv(self, env):
365 def updatereqenv(self, env):
366 if self._baseurl is not None:
366 if self._baseurl is not None:
367 u = url.url(self._baseurl)
367 u = util.url(self._baseurl)
368 env['SERVER_NAME'] = u.host
368 env['SERVER_NAME'] = u.host
369 if u.port:
369 if u.port:
370 env['SERVER_PORT'] = u.port
370 env['SERVER_PORT'] = u.port
371 env['SCRIPT_NAME'] = '/' + u.path
371 env['SCRIPT_NAME'] = '/' + u.path
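This is one of the call sites the changeset touches: hgwebdir now parses web.baseurl with util.url rather than url.url, but all updatereqenv() needs from the result is the host, port and path. A stand-alone sketch of that derivation using only the standard library; the function name and the urlsplit-based parsing are illustrative, not the actual util.url implementation.

try:
    from urllib.parse import urlsplit  # Python 3
except ImportError:
    from urlparse import urlsplit      # Python 2

def fake_wsgi_env(baseurl):
    """Roughly what updatereqenv() derives from web.baseurl (sketch only)."""
    u = urlsplit(baseurl)
    env = {}
    env['SERVER_NAME'] = u.hostname
    if u.port:
        env['SERVER_PORT'] = str(u.port)
    env['SCRIPT_NAME'] = '/' + u.path.strip('/')
    return env

# fake_wsgi_env('http://hg.example.com:8000/hg')
#   -> {'SERVER_NAME': 'hg.example.com', 'SERVER_PORT': '8000', 'SCRIPT_NAME': '/hg'}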
@@ -1,210 +1,210 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from node import nullid
9 from node import nullid
10 from i18n import _
10 from i18n import _
11 import changegroup, statichttprepo, error, url, util, wireproto
11 import changegroup, statichttprepo, error, url, util, wireproto
12 import os, urllib, urllib2, zlib, httplib
12 import os, urllib, urllib2, zlib, httplib
13 import errno, socket
13 import errno, socket
14
14
15 def zgenerator(f):
15 def zgenerator(f):
16 zd = zlib.decompressobj()
16 zd = zlib.decompressobj()
17 try:
17 try:
18 for chunk in util.filechunkiter(f):
18 for chunk in util.filechunkiter(f):
19 while chunk:
19 while chunk:
20 yield zd.decompress(chunk, 2**18)
20 yield zd.decompress(chunk, 2**18)
21 chunk = zd.unconsumed_tail
21 chunk = zd.unconsumed_tail
22 except httplib.HTTPException:
22 except httplib.HTTPException:
23 raise IOError(None, _('connection ended unexpectedly'))
23 raise IOError(None, _('connection ended unexpectedly'))
24 yield zd.flush()
24 yield zd.flush()
25
25
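# Illustrative sketch (not httprepo code): zgenerator() above streams a
# zlib-compressed response in bounded-size pieces by re-feeding
# decompressobj's unconsumed_tail. A self-contained round-trip with
# hypothetical helper names:
import zlib
from io import BytesIO

def _chunks(fp, size=8):
    while True:
        piece = fp.read(size)
        if not piece:
            return
        yield piece

def _decompress_stream(fp):
    zd = zlib.decompressobj()
    for chunk in _chunks(fp):
        while chunk:
            yield zd.decompress(chunk, 2 ** 18)
            chunk = zd.unconsumed_tail
    yield zd.flush()

payload = zlib.compress(b'some changegroup bytes' * 10)
assert b''.join(_decompress_stream(BytesIO(payload))) == b'some changegroup bytes' * 10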
26 class httprepository(wireproto.wirerepository):
26 class httprepository(wireproto.wirerepository):
27 def __init__(self, ui, path):
27 def __init__(self, ui, path):
28 self.path = path
28 self.path = path
29 self.caps = None
29 self.caps = None
30 self.handler = None
30 self.handler = None
31 u = url.url(path)
31 u = util.url(path)
32 if u.query or u.fragment:
32 if u.query or u.fragment:
33 raise util.Abort(_('unsupported URL component: "%s"') %
33 raise util.Abort(_('unsupported URL component: "%s"') %
34 (u.query or u.fragment))
34 (u.query or u.fragment))
35
35
36 # urllib cannot handle URLs with embedded user or passwd
36 # urllib cannot handle URLs with embedded user or passwd
37 self._url, authinfo = u.authinfo()
37 self._url, authinfo = u.authinfo()
38
38
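# Illustrative sketch (not httprepo code): u.authinfo() hands back the URL
# with any embedded credentials removed, plus the credentials themselves,
# because urllib2 openers expect them via a password manager rather than in
# the URL. A rough string-level version; hypothetical and much simpler than
# the real util.url.authinfo():
def _split_credentials(urlstr):
    scheme, sep, rest = urlstr.partition('://')
    if sep and '@' in rest.split('/', 1)[0]:
        creds, hostpart = rest.split('@', 1)
        user, _, passwd = creds.partition(':')
        return scheme + '://' + hostpart, (user, passwd or None)
    return urlstr, None

# _split_credentials('http://alice:secret@hg.example.com/repo')
#   -> ('http://hg.example.com/repo', ('alice', 'secret'))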
39 self.ui = ui
39 self.ui = ui
40 self.ui.debug('using %s\n' % self._url)
40 self.ui.debug('using %s\n' % self._url)
41
41
42 self.urlopener = url.opener(ui, authinfo)
42 self.urlopener = url.opener(ui, authinfo)
43
43
44 def __del__(self):
44 def __del__(self):
45 for h in self.urlopener.handlers:
45 for h in self.urlopener.handlers:
46 h.close()
46 h.close()
47 if hasattr(h, "close_all"):
47 if hasattr(h, "close_all"):
48 h.close_all()
48 h.close_all()
49
49
50 def url(self):
50 def url(self):
51 return self.path
51 return self.path
52
52
53 # look up capabilities only when needed
53 # look up capabilities only when needed
54
54
55 def _fetchcaps(self):
55 def _fetchcaps(self):
56 self.caps = set(self._call('capabilities').split())
56 self.caps = set(self._call('capabilities').split())
57
57
58 def get_caps(self):
58 def get_caps(self):
59 if self.caps is None:
59 if self.caps is None:
60 try:
60 try:
61 self._fetchcaps()
61 self._fetchcaps()
62 except error.RepoError:
62 except error.RepoError:
63 self.caps = set()
63 self.caps = set()
64 self.ui.debug('capabilities: %s\n' %
64 self.ui.debug('capabilities: %s\n' %
65 (' '.join(self.caps or ['none'])))
65 (' '.join(self.caps or ['none'])))
66 return self.caps
66 return self.caps
67
67
68 capabilities = property(get_caps)
68 capabilities = property(get_caps)
69
69
70 def lock(self):
70 def lock(self):
71 raise util.Abort(_('operation not supported over http'))
71 raise util.Abort(_('operation not supported over http'))
72
72
73 def _callstream(self, cmd, **args):
73 def _callstream(self, cmd, **args):
74 if cmd == 'pushkey':
74 if cmd == 'pushkey':
75 args['data'] = ''
75 args['data'] = ''
76 data = args.pop('data', None)
76 data = args.pop('data', None)
77 headers = args.pop('headers', {})
77 headers = args.pop('headers', {})
78 self.ui.debug("sending %s command\n" % cmd)
78 self.ui.debug("sending %s command\n" % cmd)
79 q = [('cmd', cmd)] + sorted(args.items())
79 q = [('cmd', cmd)] + sorted(args.items())
80 qs = '?%s' % urllib.urlencode(q)
80 qs = '?%s' % urllib.urlencode(q)
81 cu = "%s%s" % (self._url, qs)
81 cu = "%s%s" % (self._url, qs)
82 req = urllib2.Request(cu, data, headers)
82 req = urllib2.Request(cu, data, headers)
83 if data is not None:
83 if data is not None:
84 # len(data) is broken if data doesn't fit into Py_ssize_t
84 # len(data) is broken if data doesn't fit into Py_ssize_t
85 # add the header ourselves to avoid OverflowError
85 # add the header ourselves to avoid OverflowError
86 size = data.__len__()
86 size = data.__len__()
87 self.ui.debug("sending %s bytes\n" % size)
87 self.ui.debug("sending %s bytes\n" % size)
88 req.add_unredirected_header('Content-Length', '%d' % size)
88 req.add_unredirected_header('Content-Length', '%d' % size)
89 try:
89 try:
90 resp = self.urlopener.open(req)
90 resp = self.urlopener.open(req)
91 except urllib2.HTTPError, inst:
91 except urllib2.HTTPError, inst:
92 if inst.code == 401:
92 if inst.code == 401:
93 raise util.Abort(_('authorization failed'))
93 raise util.Abort(_('authorization failed'))
94 raise
94 raise
95 except httplib.HTTPException, inst:
95 except httplib.HTTPException, inst:
96 self.ui.debug('http error while sending %s command\n' % cmd)
96 self.ui.debug('http error while sending %s command\n' % cmd)
97 self.ui.traceback()
97 self.ui.traceback()
98 raise IOError(None, inst)
98 raise IOError(None, inst)
99 except IndexError:
99 except IndexError:
100 # this only happens with Python 2.3, later versions raise URLError
100 # this only happens with Python 2.3, later versions raise URLError
101 raise util.Abort(_('http error, possibly caused by proxy setting'))
101 raise util.Abort(_('http error, possibly caused by proxy setting'))
102 # record the url we got redirected to
102 # record the url we got redirected to
103 resp_url = resp.geturl()
103 resp_url = resp.geturl()
104 if resp_url.endswith(qs):
104 if resp_url.endswith(qs):
105 resp_url = resp_url[:-len(qs)]
105 resp_url = resp_url[:-len(qs)]
106 if self._url.rstrip('/') != resp_url.rstrip('/'):
106 if self._url.rstrip('/') != resp_url.rstrip('/'):
107 self.ui.status(_('real URL is %s\n') % resp_url)
107 self.ui.status(_('real URL is %s\n') % resp_url)
108 self._url = resp_url
108 self._url = resp_url
109 try:
109 try:
110 proto = resp.getheader('content-type')
110 proto = resp.getheader('content-type')
111 except AttributeError:
111 except AttributeError:
112 proto = resp.headers['content-type']
112 proto = resp.headers['content-type']
113
113
114 safeurl = url.hidepassword(self._url)
114 safeurl = util.hidepassword(self._url)
115 # accept old "text/plain" and "application/hg-changegroup" for now
115 # accept old "text/plain" and "application/hg-changegroup" for now
116 if not (proto.startswith('application/mercurial-') or
116 if not (proto.startswith('application/mercurial-') or
117 proto.startswith('text/plain') or
117 proto.startswith('text/plain') or
118 proto.startswith('application/hg-changegroup')):
118 proto.startswith('application/hg-changegroup')):
119 self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu))
119 self.ui.debug("requested URL: '%s'\n" % util.hidepassword(cu))
120 raise error.RepoError(
120 raise error.RepoError(
121 _("'%s' does not appear to be an hg repository:\n"
121 _("'%s' does not appear to be an hg repository:\n"
122 "---%%<--- (%s)\n%s\n---%%<---\n")
122 "---%%<--- (%s)\n%s\n---%%<---\n")
123 % (safeurl, proto, resp.read()))
123 % (safeurl, proto, resp.read()))
124
124
125 if proto.startswith('application/mercurial-'):
125 if proto.startswith('application/mercurial-'):
126 try:
126 try:
127 version = proto.split('-', 1)[1]
127 version = proto.split('-', 1)[1]
128 version_info = tuple([int(n) for n in version.split('.')])
128 version_info = tuple([int(n) for n in version.split('.')])
129 except ValueError:
129 except ValueError:
130 raise error.RepoError(_("'%s' sent a broken Content-Type "
130 raise error.RepoError(_("'%s' sent a broken Content-Type "
131 "header (%s)") % (safeurl, proto))
131 "header (%s)") % (safeurl, proto))
132 if version_info > (0, 1):
132 if version_info > (0, 1):
133 raise error.RepoError(_("'%s' uses newer protocol %s") %
133 raise error.RepoError(_("'%s' uses newer protocol %s") %
134 (safeurl, version))
134 (safeurl, version))
135
135
136 return resp
136 return resp
137
137
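# Illustrative sketch (not httprepo code): the Content-Type check above
# turns "application/mercurial-0.1" into a comparable version tuple and
# rejects anything newer than (0, 1). Standalone, with hypothetical names:
def _check_proto(proto, highest=(0, 1)):
    prefix = 'application/mercurial-'
    if not proto.startswith(prefix):
        return None                      # old text/plain style, no version
    version = proto[len(prefix):]
    version_info = tuple(int(n) for n in version.split('.'))
    if version_info > highest:
        raise ValueError('peer uses newer protocol %s' % version)
    return version_info

# _check_proto('application/mercurial-0.1') -> (0, 1)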
138 def _call(self, cmd, **args):
138 def _call(self, cmd, **args):
139 fp = self._callstream(cmd, **args)
139 fp = self._callstream(cmd, **args)
140 try:
140 try:
141 return fp.read()
141 return fp.read()
142 finally:
142 finally:
143 # if using keepalive, allow connection to be reused
143 # if using keepalive, allow connection to be reused
144 fp.close()
144 fp.close()
145
145
146 def _callpush(self, cmd, cg, **args):
146 def _callpush(self, cmd, cg, **args):
147 # have to stream bundle to a temp file because we do not have
147 # have to stream bundle to a temp file because we do not have
148 # http 1.1 chunked transfer.
148 # http 1.1 chunked transfer.
149
149
150 types = self.capable('unbundle')
150 types = self.capable('unbundle')
151 try:
151 try:
152 types = types.split(',')
152 types = types.split(',')
153 except AttributeError:
153 except AttributeError:
154 # servers older than d1b16a746db6 will send 'unbundle' as a
154 # servers older than d1b16a746db6 will send 'unbundle' as a
155 # boolean capability. They only support headerless/uncompressed
155 # boolean capability. They only support headerless/uncompressed
156 # bundles.
156 # bundles.
157 types = [""]
157 types = [""]
158 for x in types:
158 for x in types:
159 if x in changegroup.bundletypes:
159 if x in changegroup.bundletypes:
160 type = x
160 type = x
161 break
161 break
162
162
163 tempname = changegroup.writebundle(cg, None, type)
163 tempname = changegroup.writebundle(cg, None, type)
164 fp = url.httpsendfile(self.ui, tempname, "rb")
164 fp = url.httpsendfile(self.ui, tempname, "rb")
165 headers = {'Content-Type': 'application/mercurial-0.1'}
165 headers = {'Content-Type': 'application/mercurial-0.1'}
166
166
167 try:
167 try:
168 try:
168 try:
169 r = self._call(cmd, data=fp, headers=headers, **args)
169 r = self._call(cmd, data=fp, headers=headers, **args)
170 return r.split('\n', 1)
170 return r.split('\n', 1)
171 except socket.error, err:
171 except socket.error, err:
172 if err.args[0] in (errno.ECONNRESET, errno.EPIPE):
172 if err.args[0] in (errno.ECONNRESET, errno.EPIPE):
173 raise util.Abort(_('push failed: %s') % err.args[1])
173 raise util.Abort(_('push failed: %s') % err.args[1])
174 raise util.Abort(err.args[1])
174 raise util.Abort(err.args[1])
175 finally:
175 finally:
176 fp.close()
176 fp.close()
177 os.unlink(tempname)
177 os.unlink(tempname)
178
178
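# Illustrative sketch (not httprepo code): _callpush() above picks the first
# bundle compression the server advertises that the client also supports,
# treating the old boolean 'unbundle' capability as "headerless only". A
# standalone version with hypothetical names; 'known' stands in for
# changegroup.bundletypes:
def _pick_bundletype(capability, known=('HG10GZ', 'HG10BZ', 'HG10UN', '')):
    try:
        advertised = capability.split(',')
    except AttributeError:
        advertised = ['']        # old boolean capability: uncompressed only
    for candidate in advertised:
        if candidate in known:
            return candidate
    return None

# _pick_bundletype('HG10GZ,HG10BZ,HG10UN') -> 'HG10GZ'
# _pick_bundletype(True)                   -> ''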
179 def _abort(self, exception):
179 def _abort(self, exception):
180 raise exception
180 raise exception
181
181
182 def _decompress(self, stream):
182 def _decompress(self, stream):
183 return util.chunkbuffer(zgenerator(stream))
183 return util.chunkbuffer(zgenerator(stream))
184
184
185 class httpsrepository(httprepository):
185 class httpsrepository(httprepository):
186 def __init__(self, ui, path):
186 def __init__(self, ui, path):
187 if not url.has_https:
187 if not url.has_https:
188 raise util.Abort(_('Python support for SSL and HTTPS '
188 raise util.Abort(_('Python support for SSL and HTTPS '
189 'is not installed'))
189 'is not installed'))
190 httprepository.__init__(self, ui, path)
190 httprepository.__init__(self, ui, path)
191
191
192 def instance(ui, path, create):
192 def instance(ui, path, create):
193 if create:
193 if create:
194 raise util.Abort(_('cannot create new http repository'))
194 raise util.Abort(_('cannot create new http repository'))
195 try:
195 try:
196 if path.startswith('https:'):
196 if path.startswith('https:'):
197 inst = httpsrepository(ui, path)
197 inst = httpsrepository(ui, path)
198 else:
198 else:
199 inst = httprepository(ui, path)
199 inst = httprepository(ui, path)
200 try:
200 try:
201 # Try to do useful work when checking compatibility.
201 # Try to do useful work when checking compatibility.
202 # Usually saves a roundtrip since we want the caps anyway.
202 # Usually saves a roundtrip since we want the caps anyway.
203 inst._fetchcaps()
203 inst._fetchcaps()
204 except error.RepoError:
204 except error.RepoError:
205 # No luck, try older compatibility check.
205 # No luck, try older compatibility check.
206 inst.between([(nullid, nullid)])
206 inst.between([(nullid, nullid)])
207 return inst
207 return inst
208 except error.RepoError:
208 except error.RepoError:
209 ui.note('(falling back to static-http)\n')
209 ui.note('(falling back to static-http)\n')
210 return statichttprepo.instance(ui, "static-" + path, create)
210 return statichttprepo.instance(ui, "static-" + path, create)
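instance() above tries the smart HTTP protocol first and only falls back to the old static-http access method when the compatibility checks raise RepoError; that shape is easy to lose in the nested try blocks. A compressed sketch with hypothetical stand-ins for the repository classes and the probe step:

class RepoError(Exception):
    """Stand-in for mercurial.error.RepoError (sketch only)."""

def open_http_repo(ui, path, smart_factory, static_factory):
    """Try the smart protocol first; on RepoError retry via static-http.

    smart_factory/static_factory are hypothetical stand-ins for the
    httprepository/statichttprepo constructors used above.
    """
    try:
        repo = smart_factory(ui, path)
        repo.probe()          # stand-in for the capability/between() check
        return repo
    except RepoError:
        ui.note('(falling back to static-http)\n')
        return static_factory(ui, 'static-' + path)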
@@ -1,1943 +1,1942 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks
11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error
13 import scmutil, util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 import url as urlmod
18 from lock import release
17 from lock import release
19 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
20 propertycache = util.propertycache
19 propertycache = util.propertycache
21
20
22 class localrepository(repo.repository):
21 class localrepository(repo.repository):
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 'known', 'getbundle'))
23 'known', 'getbundle'))
25 supportedformats = set(('revlogv1', 'parentdelta'))
24 supportedformats = set(('revlogv1', 'parentdelta'))
26 supported = supportedformats | set(('store', 'fncache', 'shared',
25 supported = supportedformats | set(('store', 'fncache', 'shared',
27 'dotencode'))
26 'dotencode'))
28
27
29 def __init__(self, baseui, path=None, create=0):
28 def __init__(self, baseui, path=None, create=0):
30 repo.repository.__init__(self)
29 repo.repository.__init__(self)
31 self.root = os.path.realpath(util.expandpath(path))
30 self.root = os.path.realpath(util.expandpath(path))
32 self.path = os.path.join(self.root, ".hg")
31 self.path = os.path.join(self.root, ".hg")
33 self.origroot = path
32 self.origroot = path
34 self.auditor = scmutil.path_auditor(self.root, self._checknested)
33 self.auditor = scmutil.path_auditor(self.root, self._checknested)
35 self.opener = scmutil.opener(self.path)
34 self.opener = scmutil.opener(self.path)
36 self.wopener = scmutil.opener(self.root)
35 self.wopener = scmutil.opener(self.root)
37 self.baseui = baseui
36 self.baseui = baseui
38 self.ui = baseui.copy()
37 self.ui = baseui.copy()
39
38
40 try:
39 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
40 self.ui.readconfig(self.join("hgrc"), self.root)
42 extensions.loadall(self.ui)
41 extensions.loadall(self.ui)
43 except IOError:
42 except IOError:
44 pass
43 pass
45
44
46 if not os.path.isdir(self.path):
45 if not os.path.isdir(self.path):
47 if create:
46 if create:
48 if not os.path.exists(path):
47 if not os.path.exists(path):
49 util.makedirs(path)
48 util.makedirs(path)
50 util.makedir(self.path, notindexed=True)
49 util.makedir(self.path, notindexed=True)
51 requirements = ["revlogv1"]
50 requirements = ["revlogv1"]
52 if self.ui.configbool('format', 'usestore', True):
51 if self.ui.configbool('format', 'usestore', True):
53 os.mkdir(os.path.join(self.path, "store"))
52 os.mkdir(os.path.join(self.path, "store"))
54 requirements.append("store")
53 requirements.append("store")
55 if self.ui.configbool('format', 'usefncache', True):
54 if self.ui.configbool('format', 'usefncache', True):
56 requirements.append("fncache")
55 requirements.append("fncache")
57 if self.ui.configbool('format', 'dotencode', True):
56 if self.ui.configbool('format', 'dotencode', True):
58 requirements.append('dotencode')
57 requirements.append('dotencode')
59 # create an invalid changelog
58 # create an invalid changelog
60 self.opener("00changelog.i", "a").write(
59 self.opener("00changelog.i", "a").write(
61 '\0\0\0\2' # represents revlogv2
60 '\0\0\0\2' # represents revlogv2
62 ' dummy changelog to prevent using the old repo layout'
61 ' dummy changelog to prevent using the old repo layout'
63 )
62 )
64 if self.ui.configbool('format', 'parentdelta', False):
63 if self.ui.configbool('format', 'parentdelta', False):
65 requirements.append("parentdelta")
64 requirements.append("parentdelta")
66 else:
65 else:
67 raise error.RepoError(_("repository %s not found") % path)
66 raise error.RepoError(_("repository %s not found") % path)
68 elif create:
67 elif create:
69 raise error.RepoError(_("repository %s already exists") % path)
68 raise error.RepoError(_("repository %s already exists") % path)
70 else:
69 else:
71 # find requirements
70 # find requirements
72 requirements = set()
71 requirements = set()
73 try:
72 try:
74 requirements = set(self.opener("requires").read().splitlines())
73 requirements = set(self.opener("requires").read().splitlines())
75 except IOError, inst:
74 except IOError, inst:
76 if inst.errno != errno.ENOENT:
75 if inst.errno != errno.ENOENT:
77 raise
76 raise
78 for r in requirements - self.supported:
77 for r in requirements - self.supported:
79 raise error.RequirementError(
78 raise error.RequirementError(
80 _("requirement '%s' not supported") % r)
79 _("requirement '%s' not supported") % r)
81
80
82 self.sharedpath = self.path
81 self.sharedpath = self.path
83 try:
82 try:
84 s = os.path.realpath(self.opener("sharedpath").read())
83 s = os.path.realpath(self.opener("sharedpath").read())
85 if not os.path.exists(s):
84 if not os.path.exists(s):
86 raise error.RepoError(
85 raise error.RepoError(
87 _('.hg/sharedpath points to nonexistent directory %s') % s)
86 _('.hg/sharedpath points to nonexistent directory %s') % s)
88 self.sharedpath = s
87 self.sharedpath = s
89 except IOError, inst:
88 except IOError, inst:
90 if inst.errno != errno.ENOENT:
89 if inst.errno != errno.ENOENT:
91 raise
90 raise
92
91
93 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
92 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
94 self.spath = self.store.path
93 self.spath = self.store.path
95 self.sopener = self.store.opener
94 self.sopener = self.store.opener
96 self.sjoin = self.store.join
95 self.sjoin = self.store.join
97 self.opener.createmode = self.store.createmode
96 self.opener.createmode = self.store.createmode
98 self._applyrequirements(requirements)
97 self._applyrequirements(requirements)
99 if create:
98 if create:
100 self._writerequirements()
99 self._writerequirements()
101
100
102 # These two define the set of tags for this repository. _tags
101 # These two define the set of tags for this repository. _tags
103 # maps tag name to node; _tagtypes maps tag name to 'global' or
102 # maps tag name to node; _tagtypes maps tag name to 'global' or
104 # 'local'. (Global tags are defined by .hgtags across all
103 # 'local'. (Global tags are defined by .hgtags across all
105 # heads, and local tags are defined in .hg/localtags.) They
104 # heads, and local tags are defined in .hg/localtags.) They
106 # constitute the in-memory cache of tags.
105 # constitute the in-memory cache of tags.
107 self._tags = None
106 self._tags = None
108 self._tagtypes = None
107 self._tagtypes = None
109
108
110 self._branchcache = None
109 self._branchcache = None
111 self._branchcachetip = None
110 self._branchcachetip = None
112 self.nodetagscache = None
111 self.nodetagscache = None
113 self.filterpats = {}
112 self.filterpats = {}
114 self._datafilters = {}
113 self._datafilters = {}
115 self._transref = self._lockref = self._wlockref = None
114 self._transref = self._lockref = self._wlockref = None
116
115
117 def _applyrequirements(self, requirements):
116 def _applyrequirements(self, requirements):
118 self.requirements = requirements
117 self.requirements = requirements
119 self.sopener.options = {}
118 self.sopener.options = {}
120 if 'parentdelta' in requirements:
119 if 'parentdelta' in requirements:
121 self.sopener.options['parentdelta'] = 1
120 self.sopener.options['parentdelta'] = 1
122
121
123 def _writerequirements(self):
122 def _writerequirements(self):
124 reqfile = self.opener("requires", "w")
123 reqfile = self.opener("requires", "w")
125 for r in self.requirements:
124 for r in self.requirements:
126 reqfile.write("%s\n" % r)
125 reqfile.write("%s\n" % r)
127 reqfile.close()
126 reqfile.close()
128
127
129 def _checknested(self, path):
128 def _checknested(self, path):
130 """Determine if path is a legal nested repository."""
129 """Determine if path is a legal nested repository."""
131 if not path.startswith(self.root):
130 if not path.startswith(self.root):
132 return False
131 return False
133 subpath = path[len(self.root) + 1:]
132 subpath = path[len(self.root) + 1:]
134
133
135 # XXX: Checking against the current working copy is wrong in
134 # XXX: Checking against the current working copy is wrong in
136 # the sense that it can reject things like
135 # the sense that it can reject things like
137 #
136 #
138 # $ hg cat -r 10 sub/x.txt
137 # $ hg cat -r 10 sub/x.txt
139 #
138 #
140 # if sub/ is no longer a subrepository in the working copy
139 # if sub/ is no longer a subrepository in the working copy
141 # parent revision.
140 # parent revision.
142 #
141 #
143 # However, it can of course also allow things that would have
142 # However, it can of course also allow things that would have
144 # been rejected before, such as the above cat command if sub/
143 # been rejected before, such as the above cat command if sub/
145 # is a subrepository now, but was a normal directory before.
144 # is a subrepository now, but was a normal directory before.
146 # The old path auditor would have rejected by mistake since it
145 # The old path auditor would have rejected by mistake since it
147 # panics when it sees sub/.hg/.
146 # panics when it sees sub/.hg/.
148 #
147 #
149 # All in all, checking against the working copy seems sensible
148 # All in all, checking against the working copy seems sensible
150 # since we want to prevent access to nested repositories on
149 # since we want to prevent access to nested repositories on
151 # the filesystem *now*.
150 # the filesystem *now*.
152 ctx = self[None]
151 ctx = self[None]
153 parts = util.splitpath(subpath)
152 parts = util.splitpath(subpath)
154 while parts:
153 while parts:
155 prefix = os.sep.join(parts)
154 prefix = os.sep.join(parts)
156 if prefix in ctx.substate:
155 if prefix in ctx.substate:
157 if prefix == subpath:
156 if prefix == subpath:
158 return True
157 return True
159 else:
158 else:
160 sub = ctx.sub(prefix)
159 sub = ctx.sub(prefix)
161 return sub.checknested(subpath[len(prefix) + 1:])
160 return sub.checknested(subpath[len(prefix) + 1:])
162 else:
161 else:
163 parts.pop()
162 parts.pop()
164 return False
163 return False
165
164
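# Illustrative sketch (not localrepo code): _checknested() above walks the
# candidate path from its longest prefix downward, looking for a prefix that
# is a subrepo in the working context. A simplified standalone version, with
# a plain dict standing in for ctx.substate and hypothetical names:
def _is_nested(subpath, substate):
    parts = subpath.split('/')
    while parts:
        prefix = '/'.join(parts)
        if prefix in substate:
            # an exact match is legal; a longer path would be delegated to
            # the subrepo's own checknested() in the real code
            return prefix == subpath
        parts.pop()
    return False

# _is_nested('vendor/lib', {'vendor/lib': ('hg', '')}) -> True
# _is_nested('docs/img',   {'vendor/lib': ('hg', '')}) -> False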
166 @util.propertycache
165 @util.propertycache
167 def _bookmarks(self):
166 def _bookmarks(self):
168 return bookmarks.read(self)
167 return bookmarks.read(self)
169
168
170 @util.propertycache
169 @util.propertycache
171 def _bookmarkcurrent(self):
170 def _bookmarkcurrent(self):
172 return bookmarks.readcurrent(self)
171 return bookmarks.readcurrent(self)
173
172
174 @propertycache
173 @propertycache
175 def changelog(self):
174 def changelog(self):
176 c = changelog.changelog(self.sopener)
175 c = changelog.changelog(self.sopener)
177 if 'HG_PENDING' in os.environ:
176 if 'HG_PENDING' in os.environ:
178 p = os.environ['HG_PENDING']
177 p = os.environ['HG_PENDING']
179 if p.startswith(self.root):
178 if p.startswith(self.root):
180 c.readpending('00changelog.i.a')
179 c.readpending('00changelog.i.a')
181 self.sopener.options['defversion'] = c.version
180 self.sopener.options['defversion'] = c.version
182 return c
181 return c
183
182
184 @propertycache
183 @propertycache
185 def manifest(self):
184 def manifest(self):
186 return manifest.manifest(self.sopener)
185 return manifest.manifest(self.sopener)
187
186
188 @propertycache
187 @propertycache
189 def dirstate(self):
188 def dirstate(self):
190 warned = [0]
189 warned = [0]
191 def validate(node):
190 def validate(node):
192 try:
191 try:
193 self.changelog.rev(node)
192 self.changelog.rev(node)
194 return node
193 return node
195 except error.LookupError:
194 except error.LookupError:
196 if not warned[0]:
195 if not warned[0]:
197 warned[0] = True
196 warned[0] = True
198 self.ui.warn(_("warning: ignoring unknown"
197 self.ui.warn(_("warning: ignoring unknown"
199 " working parent %s!\n") % short(node))
198 " working parent %s!\n") % short(node))
200 return nullid
199 return nullid
201
200
202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
201 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
203
202
204 def __getitem__(self, changeid):
203 def __getitem__(self, changeid):
205 if changeid is None:
204 if changeid is None:
206 return context.workingctx(self)
205 return context.workingctx(self)
207 return context.changectx(self, changeid)
206 return context.changectx(self, changeid)
208
207
209 def __contains__(self, changeid):
208 def __contains__(self, changeid):
210 try:
209 try:
211 return bool(self.lookup(changeid))
210 return bool(self.lookup(changeid))
212 except error.RepoLookupError:
211 except error.RepoLookupError:
213 return False
212 return False
214
213
215 def __nonzero__(self):
214 def __nonzero__(self):
216 return True
215 return True
217
216
218 def __len__(self):
217 def __len__(self):
219 return len(self.changelog)
218 return len(self.changelog)
220
219
221 def __iter__(self):
220 def __iter__(self):
222 for i in xrange(len(self)):
221 for i in xrange(len(self)):
223 yield i
222 yield i
224
223
225 def url(self):
224 def url(self):
226 return 'file:' + self.root
225 return 'file:' + self.root
227
226
228 def hook(self, name, throw=False, **args):
227 def hook(self, name, throw=False, **args):
229 return hook.hook(self.ui, self, name, throw, **args)
228 return hook.hook(self.ui, self, name, throw, **args)
230
229
231 tag_disallowed = ':\r\n'
230 tag_disallowed = ':\r\n'
232
231
233 def _tag(self, names, node, message, local, user, date, extra={}):
232 def _tag(self, names, node, message, local, user, date, extra={}):
234 if isinstance(names, str):
233 if isinstance(names, str):
235 allchars = names
234 allchars = names
236 names = (names,)
235 names = (names,)
237 else:
236 else:
238 allchars = ''.join(names)
237 allchars = ''.join(names)
239 for c in self.tag_disallowed:
238 for c in self.tag_disallowed:
240 if c in allchars:
239 if c in allchars:
241 raise util.Abort(_('%r cannot be used in a tag name') % c)
240 raise util.Abort(_('%r cannot be used in a tag name') % c)
242
241
243 branches = self.branchmap()
242 branches = self.branchmap()
244 for name in names:
243 for name in names:
245 self.hook('pretag', throw=True, node=hex(node), tag=name,
244 self.hook('pretag', throw=True, node=hex(node), tag=name,
246 local=local)
245 local=local)
247 if name in branches:
246 if name in branches:
248 self.ui.warn(_("warning: tag %s conflicts with existing"
247 self.ui.warn(_("warning: tag %s conflicts with existing"
249 " branch name\n") % name)
248 " branch name\n") % name)
250
249
251 def writetags(fp, names, munge, prevtags):
250 def writetags(fp, names, munge, prevtags):
252 fp.seek(0, 2)
251 fp.seek(0, 2)
253 if prevtags and prevtags[-1] != '\n':
252 if prevtags and prevtags[-1] != '\n':
254 fp.write('\n')
253 fp.write('\n')
255 for name in names:
254 for name in names:
256 m = munge and munge(name) or name
255 m = munge and munge(name) or name
257 if self._tagtypes and name in self._tagtypes:
256 if self._tagtypes and name in self._tagtypes:
258 old = self._tags.get(name, nullid)
257 old = self._tags.get(name, nullid)
259 fp.write('%s %s\n' % (hex(old), m))
258 fp.write('%s %s\n' % (hex(old), m))
260 fp.write('%s %s\n' % (hex(node), m))
259 fp.write('%s %s\n' % (hex(node), m))
261 fp.close()
260 fp.close()
262
261
263 prevtags = ''
262 prevtags = ''
264 if local:
263 if local:
265 try:
264 try:
266 fp = self.opener('localtags', 'r+')
265 fp = self.opener('localtags', 'r+')
267 except IOError:
266 except IOError:
268 fp = self.opener('localtags', 'a')
267 fp = self.opener('localtags', 'a')
269 else:
268 else:
270 prevtags = fp.read()
269 prevtags = fp.read()
271
270
272 # local tags are stored in the current charset
271 # local tags are stored in the current charset
273 writetags(fp, names, None, prevtags)
272 writetags(fp, names, None, prevtags)
274 for name in names:
273 for name in names:
275 self.hook('tag', node=hex(node), tag=name, local=local)
274 self.hook('tag', node=hex(node), tag=name, local=local)
276 return
275 return
277
276
278 try:
277 try:
279 fp = self.wfile('.hgtags', 'rb+')
278 fp = self.wfile('.hgtags', 'rb+')
280 except IOError:
279 except IOError:
281 fp = self.wfile('.hgtags', 'ab')
280 fp = self.wfile('.hgtags', 'ab')
282 else:
281 else:
283 prevtags = fp.read()
282 prevtags = fp.read()
284
283
285 # committed tags are stored in UTF-8
284 # committed tags are stored in UTF-8
286 writetags(fp, names, encoding.fromlocal, prevtags)
285 writetags(fp, names, encoding.fromlocal, prevtags)
287
286
288 fp.close()
287 fp.close()
289
288
290 if '.hgtags' not in self.dirstate:
289 if '.hgtags' not in self.dirstate:
291 self[None].add(['.hgtags'])
290 self[None].add(['.hgtags'])
292
291
293 m = matchmod.exact(self.root, '', ['.hgtags'])
292 m = matchmod.exact(self.root, '', ['.hgtags'])
294 tagnode = self.commit(message, user, date, extra=extra, match=m)
293 tagnode = self.commit(message, user, date, extra=extra, match=m)
295
294
296 for name in names:
295 for name in names:
297 self.hook('tag', node=hex(node), tag=name, local=local)
296 self.hook('tag', node=hex(node), tag=name, local=local)
298
297
299 return tagnode
298 return tagnode
300
299
301 def tag(self, names, node, message, local, user, date):
300 def tag(self, names, node, message, local, user, date):
302 '''tag a revision with one or more symbolic names.
301 '''tag a revision with one or more symbolic names.
303
302
304 names is a list of strings or, when adding a single tag, names may be a
303 names is a list of strings or, when adding a single tag, names may be a
305 string.
304 string.
306
305
307 if local is True, the tags are stored in a per-repository file.
306 if local is True, the tags are stored in a per-repository file.
308 otherwise, they are stored in the .hgtags file, and a new
307 otherwise, they are stored in the .hgtags file, and a new
309 changeset is committed with the change.
308 changeset is committed with the change.
310
309
311 keyword arguments:
310 keyword arguments:
312
311
313 local: whether to store tags in non-version-controlled file
312 local: whether to store tags in non-version-controlled file
314 (default False)
313 (default False)
315
314
316 message: commit message to use if committing
315 message: commit message to use if committing
317
316
318 user: name of user to use if committing
317 user: name of user to use if committing
319
318
320 date: date tuple to use if committing'''
319 date: date tuple to use if committing'''
321
320
322 if not local:
321 if not local:
323 for x in self.status()[:5]:
322 for x in self.status()[:5]:
324 if '.hgtags' in x:
323 if '.hgtags' in x:
325 raise util.Abort(_('working copy of .hgtags is changed '
324 raise util.Abort(_('working copy of .hgtags is changed '
326 '(please commit .hgtags manually)'))
325 '(please commit .hgtags manually)'))
327
326
328 self.tags() # instantiate the cache
327 self.tags() # instantiate the cache
329 self._tag(names, node, message, local, user, date)
328 self._tag(names, node, message, local, user, date)
330
329
331 def tags(self):
330 def tags(self):
332 '''return a mapping of tag to node'''
331 '''return a mapping of tag to node'''
333 if self._tags is None:
332 if self._tags is None:
334 (self._tags, self._tagtypes) = self._findtags()
333 (self._tags, self._tagtypes) = self._findtags()
335
334
336 return self._tags
335 return self._tags
337
336
338 def _findtags(self):
337 def _findtags(self):
339 '''Do the hard work of finding tags. Return a pair of dicts
338 '''Do the hard work of finding tags. Return a pair of dicts
340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
339 (tags, tagtypes) where tags maps tag name to node, and tagtypes
341 maps tag name to a string like \'global\' or \'local\'.
340 maps tag name to a string like \'global\' or \'local\'.
342 Subclasses or extensions are free to add their own tags, but
341 Subclasses or extensions are free to add their own tags, but
343 should be aware that the returned dicts will be retained for the
342 should be aware that the returned dicts will be retained for the
344 duration of the localrepo object.'''
343 duration of the localrepo object.'''
345
344
346 # XXX what tagtype should subclasses/extensions use? Currently
345 # XXX what tagtype should subclasses/extensions use? Currently
347 # mq and bookmarks add tags, but do not set the tagtype at all.
346 # mq and bookmarks add tags, but do not set the tagtype at all.
348 # Should each extension invent its own tag type? Should there
347 # Should each extension invent its own tag type? Should there
349 # be one tagtype for all such "virtual" tags? Or is the status
348 # be one tagtype for all such "virtual" tags? Or is the status
350 # quo fine?
349 # quo fine?
351
350
352 alltags = {} # map tag name to (node, hist)
351 alltags = {} # map tag name to (node, hist)
353 tagtypes = {}
352 tagtypes = {}
354
353
355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
354 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
355 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
357
356
358 # Build the return dicts. Have to re-encode tag names because
357 # Build the return dicts. Have to re-encode tag names because
359 # the tags module always uses UTF-8 (in order not to lose info
358 # the tags module always uses UTF-8 (in order not to lose info
360 # writing to the cache), but the rest of Mercurial wants them in
359 # writing to the cache), but the rest of Mercurial wants them in
361 # local encoding.
360 # local encoding.
362 tags = {}
361 tags = {}
363 for (name, (node, hist)) in alltags.iteritems():
362 for (name, (node, hist)) in alltags.iteritems():
364 if node != nullid:
363 if node != nullid:
365 try:
364 try:
366 # ignore tags to unknown nodes
365 # ignore tags to unknown nodes
367 self.changelog.lookup(node)
366 self.changelog.lookup(node)
368 tags[encoding.tolocal(name)] = node
367 tags[encoding.tolocal(name)] = node
369 except error.LookupError:
368 except error.LookupError:
370 pass
369 pass
371 tags['tip'] = self.changelog.tip()
370 tags['tip'] = self.changelog.tip()
372 tagtypes = dict([(encoding.tolocal(name), value)
371 tagtypes = dict([(encoding.tolocal(name), value)
373 for (name, value) in tagtypes.iteritems()])
372 for (name, value) in tagtypes.iteritems()])
374 return (tags, tagtypes)
373 return (tags, tagtypes)
375
374
376 def tagtype(self, tagname):
375 def tagtype(self, tagname):
377 '''
376 '''
378 return the type of the given tag. result can be:
377 return the type of the given tag. result can be:
379
378
380 'local' : a local tag
379 'local' : a local tag
381 'global' : a global tag
380 'global' : a global tag
382 None : tag does not exist
381 None : tag does not exist
383 '''
382 '''
384
383
385 self.tags()
384 self.tags()
386
385
387 return self._tagtypes.get(tagname)
386 return self._tagtypes.get(tagname)
388
387
389 def tagslist(self):
388 def tagslist(self):
390 '''return a list of tags ordered by revision'''
389 '''return a list of tags ordered by revision'''
391 l = []
390 l = []
392 for t, n in self.tags().iteritems():
391 for t, n in self.tags().iteritems():
393 r = self.changelog.rev(n)
392 r = self.changelog.rev(n)
394 l.append((r, t, n))
393 l.append((r, t, n))
395 return [(t, n) for r, t, n in sorted(l)]
394 return [(t, n) for r, t, n in sorted(l)]
396
395
397 def nodetags(self, node):
396 def nodetags(self, node):
398 '''return the tags associated with a node'''
397 '''return the tags associated with a node'''
399 if not self.nodetagscache:
398 if not self.nodetagscache:
400 self.nodetagscache = {}
399 self.nodetagscache = {}
401 for t, n in self.tags().iteritems():
400 for t, n in self.tags().iteritems():
402 self.nodetagscache.setdefault(n, []).append(t)
401 self.nodetagscache.setdefault(n, []).append(t)
403 for tags in self.nodetagscache.itervalues():
402 for tags in self.nodetagscache.itervalues():
404 tags.sort()
403 tags.sort()
405 return self.nodetagscache.get(node, [])
404 return self.nodetagscache.get(node, [])
406
405
407 def nodebookmarks(self, node):
406 def nodebookmarks(self, node):
408 marks = []
407 marks = []
409 for bookmark, n in self._bookmarks.iteritems():
408 for bookmark, n in self._bookmarks.iteritems():
410 if n == node:
409 if n == node:
411 marks.append(bookmark)
410 marks.append(bookmark)
412 return sorted(marks)
411 return sorted(marks)
413
412
414 def _branchtags(self, partial, lrev):
413 def _branchtags(self, partial, lrev):
415 # TODO: rename this function?
414 # TODO: rename this function?
416 tiprev = len(self) - 1
415 tiprev = len(self) - 1
417 if lrev != tiprev:
416 if lrev != tiprev:
418 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
417 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
419 self._updatebranchcache(partial, ctxgen)
418 self._updatebranchcache(partial, ctxgen)
420 self._writebranchcache(partial, self.changelog.tip(), tiprev)
419 self._writebranchcache(partial, self.changelog.tip(), tiprev)
421
420
422 return partial
421 return partial
423
422
424 def updatebranchcache(self):
423 def updatebranchcache(self):
425 tip = self.changelog.tip()
424 tip = self.changelog.tip()
426 if self._branchcache is not None and self._branchcachetip == tip:
425 if self._branchcache is not None and self._branchcachetip == tip:
427 return self._branchcache
426 return self._branchcache
428
427
429 oldtip = self._branchcachetip
428 oldtip = self._branchcachetip
430 self._branchcachetip = tip
429 self._branchcachetip = tip
431 if oldtip is None or oldtip not in self.changelog.nodemap:
430 if oldtip is None or oldtip not in self.changelog.nodemap:
432 partial, last, lrev = self._readbranchcache()
431 partial, last, lrev = self._readbranchcache()
433 else:
432 else:
434 lrev = self.changelog.rev(oldtip)
433 lrev = self.changelog.rev(oldtip)
435 partial = self._branchcache
434 partial = self._branchcache
436
435
437 self._branchtags(partial, lrev)
436 self._branchtags(partial, lrev)
438 # this private cache holds all heads (not just tips)
437 # this private cache holds all heads (not just tips)
439 self._branchcache = partial
438 self._branchcache = partial
440
439
441 def branchmap(self):
440 def branchmap(self):
442 '''returns a dictionary {branch: [branchheads]}'''
441 '''returns a dictionary {branch: [branchheads]}'''
443 self.updatebranchcache()
442 self.updatebranchcache()
444 return self._branchcache
443 return self._branchcache
445
444
446 def branchtags(self):
445 def branchtags(self):
447 '''return a dict where branch names map to the tipmost head of
446 '''return a dict where branch names map to the tipmost head of
448 the branch, open heads come before closed'''
447 the branch, open heads come before closed'''
449 bt = {}
448 bt = {}
450 for bn, heads in self.branchmap().iteritems():
449 for bn, heads in self.branchmap().iteritems():
451 tip = heads[-1]
450 tip = heads[-1]
452 for h in reversed(heads):
451 for h in reversed(heads):
453 if 'close' not in self.changelog.read(h)[5]:
452 if 'close' not in self.changelog.read(h)[5]:
454 tip = h
453 tip = h
455 break
454 break
456 bt[bn] = tip
455 bt[bn] = tip
457 return bt
456 return bt
458
457
459 def _readbranchcache(self):
458 def _readbranchcache(self):
460 partial = {}
459 partial = {}
461 try:
460 try:
462 f = self.opener("cache/branchheads")
461 f = self.opener("cache/branchheads")
463 lines = f.read().split('\n')
462 lines = f.read().split('\n')
464 f.close()
463 f.close()
465 except (IOError, OSError):
464 except (IOError, OSError):
466 return {}, nullid, nullrev
465 return {}, nullid, nullrev
467
466
468 try:
467 try:
469 last, lrev = lines.pop(0).split(" ", 1)
468 last, lrev = lines.pop(0).split(" ", 1)
470 last, lrev = bin(last), int(lrev)
469 last, lrev = bin(last), int(lrev)
471 if lrev >= len(self) or self[lrev].node() != last:
470 if lrev >= len(self) or self[lrev].node() != last:
472 # invalidate the cache
471 # invalidate the cache
473 raise ValueError('invalidating branch cache (tip differs)')
472 raise ValueError('invalidating branch cache (tip differs)')
474 for l in lines:
473 for l in lines:
475 if not l:
474 if not l:
476 continue
475 continue
477 node, label = l.split(" ", 1)
476 node, label = l.split(" ", 1)
478 label = encoding.tolocal(label.strip())
477 label = encoding.tolocal(label.strip())
479 partial.setdefault(label, []).append(bin(node))
478 partial.setdefault(label, []).append(bin(node))
480 except KeyboardInterrupt:
479 except KeyboardInterrupt:
481 raise
480 raise
482 except Exception, inst:
481 except Exception, inst:
483 if self.ui.debugflag:
482 if self.ui.debugflag:
484 self.ui.warn(str(inst), '\n')
483 self.ui.warn(str(inst), '\n')
485 partial, last, lrev = {}, nullid, nullrev
484 partial, last, lrev = {}, nullid, nullrev
486 return partial, last, lrev
485 return partial, last, lrev
487
486
488 def _writebranchcache(self, branches, tip, tiprev):
487 def _writebranchcache(self, branches, tip, tiprev):
489 try:
488 try:
490 f = self.opener("cache/branchheads", "w", atomictemp=True)
489 f = self.opener("cache/branchheads", "w", atomictemp=True)
491 f.write("%s %s\n" % (hex(tip), tiprev))
490 f.write("%s %s\n" % (hex(tip), tiprev))
492 for label, nodes in branches.iteritems():
491 for label, nodes in branches.iteritems():
493 for node in nodes:
492 for node in nodes:
494 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
493 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
495 f.rename()
494 f.rename()
496 except (IOError, OSError):
495 except (IOError, OSError):
497 pass
496 pass
498
497
499 def _updatebranchcache(self, partial, ctxgen):
498 def _updatebranchcache(self, partial, ctxgen):
500 # collect new branch entries
499 # collect new branch entries
501 newbranches = {}
500 newbranches = {}
502 for c in ctxgen:
501 for c in ctxgen:
503 newbranches.setdefault(c.branch(), []).append(c.node())
502 newbranches.setdefault(c.branch(), []).append(c.node())
504 # if older branchheads are reachable from new ones, they aren't
503 # if older branchheads are reachable from new ones, they aren't
505 # really branchheads. Note checking parents is insufficient:
504 # really branchheads. Note checking parents is insufficient:
506 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
505 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
507 for branch, newnodes in newbranches.iteritems():
506 for branch, newnodes in newbranches.iteritems():
508 bheads = partial.setdefault(branch, [])
507 bheads = partial.setdefault(branch, [])
509 bheads.extend(newnodes)
508 bheads.extend(newnodes)
510 if len(bheads) <= 1:
509 if len(bheads) <= 1:
511 continue
510 continue
512 bheads = sorted(bheads, key=lambda x: self[x].rev())
511 bheads = sorted(bheads, key=lambda x: self[x].rev())
513 # starting from tip means fewer passes over reachable
512 # starting from tip means fewer passes over reachable
514 while newnodes:
513 while newnodes:
515 latest = newnodes.pop()
514 latest = newnodes.pop()
516 if latest not in bheads:
515 if latest not in bheads:
517 continue
516 continue
518 minbhrev = self[bheads[0]].node()
517 minbhrev = self[bheads[0]].node()
519 reachable = self.changelog.reachable(latest, minbhrev)
518 reachable = self.changelog.reachable(latest, minbhrev)
520 reachable.remove(latest)
519 reachable.remove(latest)
521 if reachable:
520 if reachable:
522 bheads = [b for b in bheads if b not in reachable]
521 bheads = [b for b in bheads if b not in reachable]
523 partial[branch] = bheads
522 partial[branch] = bheads
524
523
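# Illustrative sketch (not localrepo code): the comment above is the whole
# point of using full reachability rather than parent links when pruning
# branch heads. With revisions 1 (branch a) -> 2 (branch b) -> 3 (branch a),
# rev 1 is an ancestor of rev 3 only via a revision on another branch, so a
# parents-only check would wrongly keep both 1 and 3 as heads of branch a.
# A toy version over a parents dict, with hypothetical names:
def _ancestors(rev, parents):
    seen, stack = set(), [rev]
    while stack:
        for p in parents.get(stack.pop(), ()):
            if p not in seen:
                seen.add(p)
                stack.append(p)
    return seen

def _prune_heads(heads, parents):
    pruned = set(heads)
    for h in heads:
        pruned -= _ancestors(h, parents)
    return sorted(pruned)

parents = {3: (2,), 2: (1,), 1: ()}
# heads recorded for branch 'a' before pruning: [1, 3]
# _prune_heads([1, 3], parents) -> [3]   (1 is reachable from 3, so dropped)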
    def lookup(self, key):
        if isinstance(key, int):
            return self.changelog.node(key)
        elif key == '.':
            return self.dirstate.p1()
        elif key == 'null':
            return nullid
        elif key == 'tip':
            return self.changelog.tip()
        n = self.changelog._match(key)
        if n:
            return n
        if key in self._bookmarks:
            return self._bookmarks[key]
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n

        # can't find key, check if it might have come from damaged dirstate
        if key in self.dirstate.parents():
            raise error.Abort(_("working directory has unknown parent '%s'!")
                              % short(key))
        try:
            if len(key) == 20:
                key = hex(key)
        except TypeError:
            pass
        raise error.RepoLookupError(_("unknown revision '%s'") % key)

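    # Usage sketch for lookup() (illustrative; assumes an open localrepository
    # instance `repo`):
    #
    #   repo.lookup('tip')      # == repo.changelog.tip()
    #   repo.lookup(0)          # == repo.changelog.node(0)
    #   repo.lookup('default')  # a branch name resolves via branchtags()
    #
    # Resolution order is: integer revision, '.', 'null', 'tip', exact node,
    # bookmark, tag, branch name, then an unambiguous hex prefix; anything
    # else raises RepoLookupError.
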
    def lookupbranch(self, key, remote=None):
        repo = remote or self
        if key in repo.branchmap():
            return key

        repo = (remote and remote.local()) and remote or self
        return repo[key].branch()

    def known(self, nodes):
        nm = self.changelog.nodemap
        return [(n in nm) for n in nodes]

    def local(self):
        return True

    def join(self, f):
        return os.path.join(self.path, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid):
        return self[changeid]

    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
           fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def _link(self, f):
        return os.path.islink(self.wjoin(f))

    def _loadfilter(self, filter):
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]

    def _filter(self, filterpats, filename, data):
        for mf, fn, cmd in filterpats:
            if mf(filename):
                self.ui.debug("filtering %s through %s\n" % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data

    @propertycache
    def _encodefilterpats(self):
        return self._loadfilter('encode')

    @propertycache
    def _decodefilterpats(self):
        return self._loadfilter('decode')

    def adddatafilter(self, name, filter):
        self._datafilters[name] = filter

    def wread(self, filename):
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter(self._encodefilterpats, filename, data)

    def wwrite(self, filename, data, flags):
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener(filename, 'w').write(data)
            if 'x' in flags:
                util.set_flags(self.wjoin(filename), False, True)

    def wwritedata(self, filename, data):
        return self._filter(self._decodefilterpats, filename, data)

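    # Example hgrc the filter machinery above consumes (illustrative only;
    # the patterns and commands are user configuration, not defaults).
    # [encode] filters run in wread() as data leaves the working directory,
    # [decode] filters run in wwrite() on the way back out:
    #
    #   [encode]
    #   **.txt = dos2unix
    #
    #   [decode]
    #   **.txt = unix2dos
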
    def transaction(self, desc):
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(
            encoding.fromlocal(self.dirstate.branch()))
        self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))

        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch")),
                   (self.join("journal.desc"), self.join("undo.desc"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr

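    # Typical calling pattern for transaction() (sketch only; commitctx()
    # further down follows the same shape):
    #
    #   tr = repo.transaction('my-operation')
    #   try:
    #       # ... append to revlogs through tr ...
    #       tr.close()      # commit the journal
    #   finally:
    #       tr.release()    # no-op if closed, otherwise rolls back
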
    def recover(self):
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()

    def rollback(self, dryrun=False):
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                try:
                    args = self.opener("undo.desc", "r").read().splitlines()
                    if len(args) >= 3 and self.ui.verbose:
                        desc = _("repository tip rolled back to revision %s"
                                 " (undo %s: %s)\n") % (
                                 int(args[0]) - 1, args[1], args[2])
                    elif len(args) >= 2:
                        desc = _("repository tip rolled back to revision %s"
                                 " (undo %s)\n") % (
                                 int(args[0]) - 1, args[1])
                except IOError:
                    desc = _("rolling back unknown transaction\n")
                self.ui.status(desc)
                if dryrun:
                    return
                transaction.rollback(self.sopener, self.sjoin("undo"),
                                     self.ui.warn)
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                if os.path.exists(self.join('undo.bookmarks')):
                    util.rename(self.join('undo.bookmarks'),
                                self.join('bookmarks'))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    self.ui.warn(_("named branch could not be reset, "
                                   "current branch is still: %s\n")
                                 % self.dirstate.branch())
                self.invalidate()
                self.dirstate.invalidate()
                self.destroyed()
                parents = tuple([p.rev() for p in self.parents()])
                if len(parents) > 1:
                    self.ui.status(_("working directory now based on "
                                     "revisions %d and %d\n") % parents)
                else:
                    self.ui.status(_("working directory now based on "
                                     "revision %d\n") % parents)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)

    def invalidatecaches(self):
        self._tags = None
        self._tagtypes = None
        self.nodetagscache = None
        self._branchcache = None # in UTF-8
        self._branchcachetip = None

    def invalidate(self):
        for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
            if a in self.__dict__:
                delattr(self, a)
        self.invalidatecaches()

    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l

    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        l = self._lock(self.sjoin("lock"), wait, self.store.write,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l

    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l

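    # Lock-ordering sketch (illustrative): callers that need both locks take
    # the working-directory lock first and the store lock second, as commit()
    # below does:
    #
    #   wlock = repo.wlock()
    #   try:
    #       lock = repo.lock()
    #       try:
    #           pass  # modify store and working state here
    #       finally:
    #           lock.release()
    #   finally:
    #       wlock.release()
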
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self[None].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1

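    # Sketch of the copy metadata produced above (illustrative values): when
    # committing "bar" that was renamed from "foo", _filecommit records
    #
    #   meta = {'copy': 'foo', 'copyrev': hex(<filenode of foo>)}
    #
    # and stores nullid as the entry's first parent, which is the
    # "look up the copy data" signal described in the comment above.
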
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            removedsubs = set()
            for p in wctx.parents():
                removedsubs.update(s for s in p.substate if match(s))
            for s in wctx.substate:
                removedsubs.discard(s)
                if match(s) and wctx.sub(s).dirty():
                    subs.append(s)
            if (subs or removedsubs):
                if (not match('.hgsub') and
                    '.hgsub' in (wctx.modified() + wctx.added())):
                    raise util.Abort(_("can't commit subrepos without .hgsub"))
                if '.hgsubstate' not in changes[0]:
                    changes[0].insert(0, '.hgsubstate')

            if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
                changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
                if changedsubs:
                    raise util.Abort(_("uncommitted changes in subrepo %s")
                                     % changedsubs[0])

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            if (not force and not extra.get("close") and not merge
                and not (changes[0] or changes[1] or changes[2])
                and wctx.branch() == wctx.p1().branch()):
                return None

            ms = mergemod.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg help resolve)"))

            cctx = context.workingctx(self, text, user, date, extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # commit subs
            if subs or removedsubs:
                state = wctx.substate.copy()
                for s in sorted(subs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    state[s] = (state[s][0], sr)
                subrepo.writestate(self, state)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfile = self.opener('last-message.txt', 'wb')
            msgfile.write(cctx._text)
            msgfile.close()

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except:
                if edited:
                    msgfn = self.pathto(msgfile.name[len(self.root)+1:])
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, p1, ret)
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.forget(f)
            self.dirstate.setparents(ret)
            ms.reset()
        finally:
            wlock.release()

        self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
        return ret

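    # Minimal usage sketch for commit() (illustrative; user and text are
    # placeholders). The return value is the new changeset node, or None when
    # there was nothing to commit:
    #
    #   node = repo.commit(text="fix frobnication",
    #                      user="Jane Doe <jane@example.com>")
    #   if node is None:
    #       repo.ui.status("nothing changed\n")
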
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.
        """

        tr = lock = None
        removed = list(ctx.removed())
        p1, p2 = ctx.p1(), ctx.p2()
        m1 = p1.manifest().copy()
        m2 = p2.manifest()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except OSError, inst:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                except IOError, inst:
                    errcode = getattr(inst, 'errno', errno.ENOENT)
                    if error or errcode and errcode != errno.ENOENT:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        removed.append(f)

            # update manifest
            m1.update(new)
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, changed + removed, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            tr.close()

            if self._branchcache:
                self.updatebranchcache()
            return n
        finally:
            if tr:
                tr.release()
            lock.release()

    def destroyed(self):
        '''Inform the repository that nodes have been destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done after destroying history.'''
        # XXX it might be nice if we could take the list of destroyed
        # nodes, but I don't see an easy way for rollback() to do that

        # Ensure the persistent tag cache is updated. Doing it now
        # means that the tag cache only has to worry about destroyed
        # heads immediately after a strip/rollback. That in turn
        # guarantees that "cachetip == currenttip" (comparing both rev
        # and node) always means no nodes have been added or destroyed.

        # XXX this is suboptimal when qrefresh'ing: we strip the current
        # head, refresh the tag cache, then immediately add a new head.
        # But I think doing it this way is necessary for the "instant
        # tag cache retrieval" case to work.
        self.invalidatecaches()

    def walk(self, match, node=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function
        '''
        return self[node].walk(match)

    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False,
               listsubrepos=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def mfmatches(ctx):
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or matchmod.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = []
            if '.hgsub' in self.dirstate:
                subrepos = ctx1.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f])):
                        modified.append(f)
                    else:
                        fixup.append(f)

                # update dirstate for files that are actually clean
                if fixup:
                    if listclean:
                        clean += fixup

                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (fn not in deleted and
                        (mf1.flags(fn) != mf2.flags(fn) or
                         (mf1[fn] != mf2[fn] and
                          (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                elif fn not in deleted:
                    added.append(fn)
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean

        if listsubrepos:
            for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
                if working:
                    rev2 = None
                else:
                    rev2 = ctx2.substate[subpath][1]
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self.ui.status(_("skipping missing subrepository: %s\n")
                                   % subpath)

        for l in r:
            l.sort()
        return r

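    # The 7-tuple returned above unpacks as follows (sketch; the last three
    # lists stay empty unless the corresponding flag is passed):
    #
    #   modified, added, removed, deleted, unknown, ignored, clean = \
    #       repo.status(unknown=True, ignored=True, clean=True)
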
    def heads(self, start=None):
        heads = self.changelog.heads(start)
        # sort the output in rev descending order
        return sorted(heads, key=self.changelog.rev, reverse=True)

    def branchheads(self, branch=None, start=None, closed=False):
        '''return a (possibly filtered) list of heads for the given branch

        Heads are returned in topological order, from newest to oldest.
        If branch is None, use the dirstate branch.
        If start is not None, return only heads reachable from start.
        If closed is True, return heads that are marked as closed as well.
        '''
        if branch is None:
            branch = self[None].branch()
        branches = self.branchmap()
        if branch not in branches:
            return []
        # the cache returns heads ordered lowest to highest
        bheads = list(reversed(branches[branch]))
        if start is not None:
            # filter out the heads that cannot be reached from startrev
            fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
            bheads = [h for h in bheads if h in fbheads]
        if not closed:
            bheads = [h for h in bheads if
                      ('close' not in self.changelog.read(h)[5])]
        return bheads

    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom and n != nullid:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

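    # Sampling sketch for between(): for each (top, bottom) pair it walks
    # first parents from top and keeps the nodes at distances 1, 2, 4, 8, ...
    # until it reaches bottom (or nullid). The legacy discovery protocol uses
    # these exponentially spaced samples to narrow down the common ancestor.
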
    def pull(self, remote, heads=None, force=False):
        lock = self.lock()
        try:
            tmp = discovery.findcommonincoming(self, remote, heads=heads,
                                               force=force)
            common, fetch, rheads = tmp
            if not fetch:
                self.ui.status(_("no changes found\n"))
                result = 0
            else:
                if heads is None and list(common) == [nullid]:
                    self.ui.status(_("requesting all changes\n"))
                elif heads is None and remote.capable('changegroupsubset'):
                    # issue1320, avoid a race if remote changed after discovery
                    heads = rheads

                if remote.capable('getbundle'):
                    cg = remote.getbundle('pull', common=common,
                                          heads=heads or rheads)
                elif heads is None:
                    cg = remote.changegroup(fetch, 'pull')
                elif not remote.capable('changegroupsubset'):
                    raise util.Abort(_("partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                else:
                    cg = remote.changegroupsubset(fetch, heads, 'pull')
                result = self.addchangegroup(cg, 'pull', remote.url(),
                                             lock=lock)
        finally:
            lock.release()

        return result

    def checkpush(self, force, revs):
        """Extensions can override this function if additional checks have
        to be performed before pushing, or call it if they override push
        command.
        """
        pass

    def push(self, remote, force=False, revs=None, newbranch=False):
        '''Push outgoing changesets (limited by revs) from the current
        repository to remote. Return an integer:
          - 0 means HTTP error *or* nothing to push
          - 1 means we pushed and remote head count is unchanged *or*
            we have outgoing changesets but refused to push
          - other values as described by addchangegroup()
        '''
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        self.checkpush(force, revs)
        lock = None
        unbundle = remote.capable('unbundle')
        if not unbundle:
            lock = remote.lock()
        try:
            cg, remote_heads = discovery.prepush(self, remote, force, revs,
                                                 newbranch)
            ret = remote_heads
            if cg is not None:
                if unbundle:
                    # local repo finds heads on server, finds out what
                    # revs it must push. once revs transferred, if server
                    # finds it has different heads (someone else won
                    # commit/push race), server aborts.
                    if force:
                        remote_heads = ['force']
                    # ssh: return remote's addchangegroup()
                    # http: return remote's addchangegroup() or 0 for error
                    ret = remote.unbundle(cg, remote_heads, 'push')
                else:
                    # we return an integer indicating remote head count change
                    ret = remote.addchangegroup(cg, 'push', self.url(),
                                                lock=lock)
        finally:
            if lock is not None:
                lock.release()

        self.ui.debug("checking for updated bookmarks\n")
        rb = remote.listkeys('bookmarks')
        for k in rb.keys():
            if k in self._bookmarks:
                nr, nl = rb[k], hex(self._bookmarks[k])
                if nr in self:
                    cr = self[nr]
                    cl = self[nl]
                    if cl in cr.descendants():
                        r = remote.pushkey('bookmarks', k, nr, nl)
                        if r:
                            self.ui.status(_("updating bookmark %s\n") % k)
                        else:
                            self.ui.warn(_('updating bookmark %s'
                                           ' failed!\n') % k)

        return ret

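    # Interpretation sketch for push()'s return value, mirroring the docstring
    # above (illustrative caller):
    #
    #   ret = repo.push(other)
    #   # ret == 0 : HTTP error or nothing to push
    #   # ret == 1 : pushed, remote head count unchanged (or push refused)
    #   # ret >  1 : remote head count change, as from addchangegroup()
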
1430 def changegroupinfo(self, nodes, source):
1429 def changegroupinfo(self, nodes, source):
1431 if self.ui.verbose or source == 'bundle':
1430 if self.ui.verbose or source == 'bundle':
1432 self.ui.status(_("%d changesets found\n") % len(nodes))
1431 self.ui.status(_("%d changesets found\n") % len(nodes))
1433 if self.ui.debugflag:
1432 if self.ui.debugflag:
1434 self.ui.debug("list of changesets:\n")
1433 self.ui.debug("list of changesets:\n")
1435 for node in nodes:
1434 for node in nodes:
1436 self.ui.debug("%s\n" % hex(node))
1435 self.ui.debug("%s\n" % hex(node))
1437
1436
1438 def changegroupsubset(self, bases, heads, source):
1437 def changegroupsubset(self, bases, heads, source):
1439 """Compute a changegroup consisting of all the nodes that are
1438 """Compute a changegroup consisting of all the nodes that are
1440 descendents of any of the bases and ancestors of any of the heads.
1439 descendents of any of the bases and ancestors of any of the heads.
1441 Return a chunkbuffer object whose read() method will return
1440 Return a chunkbuffer object whose read() method will return
1442 successive changegroup chunks.
1441 successive changegroup chunks.
1443
1442
1444 It is fairly complex as determining which filenodes and which
1443 It is fairly complex as determining which filenodes and which
1445 manifest nodes need to be included for the changeset to be complete
1444 manifest nodes need to be included for the changeset to be complete
1446 is non-trivial.
1445 is non-trivial.
1447
1446
1448 Another wrinkle is doing the reverse, figuring out which changeset in
1447 Another wrinkle is doing the reverse, figuring out which changeset in
1449 the changegroup a particular filenode or manifestnode belongs to.
1448 the changegroup a particular filenode or manifestnode belongs to.
1450 """
1449 """
1451 cl = self.changelog
1450 cl = self.changelog
1452 if not bases:
1451 if not bases:
1453 bases = [nullid]
1452 bases = [nullid]
1454 csets, bases, heads = cl.nodesbetween(bases, heads)
1453 csets, bases, heads = cl.nodesbetween(bases, heads)
1455 # We assume that all ancestors of bases are known
1454 # We assume that all ancestors of bases are known
1456 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1455 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1457 return self._changegroupsubset(common, csets, heads, source)
1456 return self._changegroupsubset(common, csets, heads, source)
1458
1457
1459 def getbundle(self, source, heads=None, common=None):
1458 def getbundle(self, source, heads=None, common=None):
1460 """Like changegroupsubset, but returns the set difference between the
1459 """Like changegroupsubset, but returns the set difference between the
1461 ancestors of heads and the ancestors of common.
1460 ancestors of heads and the ancestors of common.
1462
1461
1463 If heads is None, use the local heads. If common is None, use [nullid].
1462 If heads is None, use the local heads. If common is None, use [nullid].
1464
1463
1465 The nodes in common might not all be known locally due to the way the
1464 The nodes in common might not all be known locally due to the way the
1466 current discovery protocol works.
1465 current discovery protocol works.
1467 """
1466 """
1468 cl = self.changelog
1467 cl = self.changelog
1469 if common:
1468 if common:
1470 nm = cl.nodemap
1469 nm = cl.nodemap
1471 common = [n for n in common if n in nm]
1470 common = [n for n in common if n in nm]
1472 else:
1471 else:
1473 common = [nullid]
1472 common = [nullid]
1474 if not heads:
1473 if not heads:
1475 heads = cl.heads()
1474 heads = cl.heads()
1476 common, missing = cl.findcommonmissing(common, heads)
1475 common, missing = cl.findcommonmissing(common, heads)
1477 if not missing:
1476 if not missing:
1478 return None
1477 return None
1479 return self._changegroupsubset(common, missing, heads, source)
1478 return self._changegroupsubset(common, missing, heads, source)
1480
1479
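getbundle() above is, at heart, set arithmetic on the changelog DAG: everything reachable from heads that is not reachable from common. A repo-free sketch of that difference on a hand-built parent map (missing_revs and its arguments are illustrative, not Mercurial API):

    def missing_revs(parents, heads, common):
        """parents maps rev -> list of parent revs (a tiny hand-built DAG)."""
        def ancestors(roots):
            seen, stack = set(), list(roots)
            while stack:
                r = stack.pop()
                if r not in seen:
                    seen.add(r)
                    stack.extend(parents.get(r, []))
            return seen
        return ancestors(heads) - ancestors(common)

    # linear history 0 <- 1 <- 2 <- 3:
    # missing_revs({1: [0], 2: [1], 3: [2]}, heads=[3], common=[1]) == {2, 3}

findcommonmissing() plays the role of this difference in the real code, and the result feeds _changegroupsubset().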
1481 def _changegroupsubset(self, commonrevs, csets, heads, source):
1480 def _changegroupsubset(self, commonrevs, csets, heads, source):
1482
1481
1483 cl = self.changelog
1482 cl = self.changelog
1484 mf = self.manifest
1483 mf = self.manifest
1485 mfs = {} # needed manifests
1484 mfs = {} # needed manifests
1486 fnodes = {} # needed file nodes
1485 fnodes = {} # needed file nodes
1487 changedfiles = set()
1486 changedfiles = set()
1488 fstate = ['', {}]
1487 fstate = ['', {}]
1489 count = [0]
1488 count = [0]
1490
1489
1491 # can we go through the fast path?
1490 # can we go through the fast path?
1492 heads.sort()
1491 heads.sort()
1493 if heads == sorted(self.heads()):
1492 if heads == sorted(self.heads()):
1494 return self._changegroup(csets, source)
1493 return self._changegroup(csets, source)
1495
1494
1496 # slow path
1495 # slow path
1497 self.hook('preoutgoing', throw=True, source=source)
1496 self.hook('preoutgoing', throw=True, source=source)
1498 self.changegroupinfo(csets, source)
1497 self.changegroupinfo(csets, source)
1499
1498
1500 # filter any nodes that claim to be part of the known set
1499 # filter any nodes that claim to be part of the known set
1501 def prune(revlog, missing):
1500 def prune(revlog, missing):
1502 for n in missing:
1501 for n in missing:
1503 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1502 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1504 yield n
1503 yield n
1505
1504
1506 def lookup(revlog, x):
1505 def lookup(revlog, x):
1507 if revlog == cl:
1506 if revlog == cl:
1508 c = cl.read(x)
1507 c = cl.read(x)
1509 changedfiles.update(c[3])
1508 changedfiles.update(c[3])
1510 mfs.setdefault(c[0], x)
1509 mfs.setdefault(c[0], x)
1511 count[0] += 1
1510 count[0] += 1
1512 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1511 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1513 return x
1512 return x
1514 elif revlog == mf:
1513 elif revlog == mf:
1515 clnode = mfs[x]
1514 clnode = mfs[x]
1516 mdata = mf.readfast(x)
1515 mdata = mf.readfast(x)
1517 for f in changedfiles:
1516 for f in changedfiles:
1518 if f in mdata:
1517 if f in mdata:
1519 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1518 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1520 count[0] += 1
1519 count[0] += 1
1521 self.ui.progress(_('bundling'), count[0],
1520 self.ui.progress(_('bundling'), count[0],
1522 unit=_('manifests'), total=len(mfs))
1521 unit=_('manifests'), total=len(mfs))
1523 return mfs[x]
1522 return mfs[x]
1524 else:
1523 else:
1525 self.ui.progress(
1524 self.ui.progress(
1526 _('bundling'), count[0], item=fstate[0],
1525 _('bundling'), count[0], item=fstate[0],
1527 unit=_('files'), total=len(changedfiles))
1526 unit=_('files'), total=len(changedfiles))
1528 return fstate[1][x]
1527 return fstate[1][x]
1529
1528
1530 bundler = changegroup.bundle10(lookup)
1529 bundler = changegroup.bundle10(lookup)
1531
1530
1532 def gengroup():
1531 def gengroup():
1533 # Create a changenode group generator that will call our functions
1532 # Create a changenode group generator that will call our functions
1534 # back to look up the owning changenode and collect information.
1533 # back to look up the owning changenode and collect information.
1535 for chunk in cl.group(csets, bundler):
1534 for chunk in cl.group(csets, bundler):
1536 yield chunk
1535 yield chunk
1537 self.ui.progress(_('bundling'), None)
1536 self.ui.progress(_('bundling'), None)
1538
1537
1539 # Create a generator for the manifestnodes that calls our lookup
1538 # Create a generator for the manifestnodes that calls our lookup
1540 # and data collection functions back.
1539 # and data collection functions back.
1541 count[0] = 0
1540 count[0] = 0
1542 for chunk in mf.group(prune(mf, mfs), bundler):
1541 for chunk in mf.group(prune(mf, mfs), bundler):
1543 yield chunk
1542 yield chunk
1544 self.ui.progress(_('bundling'), None)
1543 self.ui.progress(_('bundling'), None)
1545
1544
1546 mfs.clear()
1545 mfs.clear()
1547
1546
1548 # Go through all our files in order sorted by name.
1547 # Go through all our files in order sorted by name.
1549 count[0] = 0
1548 count[0] = 0
1550 for fname in sorted(changedfiles):
1549 for fname in sorted(changedfiles):
1551 filerevlog = self.file(fname)
1550 filerevlog = self.file(fname)
1552 if not len(filerevlog):
1551 if not len(filerevlog):
1553 raise util.Abort(_("empty or missing revlog for %s") % fname)
1552 raise util.Abort(_("empty or missing revlog for %s") % fname)
1554 fstate[0] = fname
1553 fstate[0] = fname
1555 fstate[1] = fnodes.pop(fname, {})
1554 fstate[1] = fnodes.pop(fname, {})
1556 first = True
1555 first = True
1557
1556
1558 for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
1557 for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
1559 bundler):
1558 bundler):
1560 if first:
1559 if first:
1561 if chunk == bundler.close():
1560 if chunk == bundler.close():
1562 break
1561 break
1563 count[0] += 1
1562 count[0] += 1
1564 yield bundler.fileheader(fname)
1563 yield bundler.fileheader(fname)
1565 first = False
1564 first = False
1566 yield chunk
1565 yield chunk
1567 # Signal that no more groups are left.
1566 # Signal that no more groups are left.
1568 yield bundler.close()
1567 yield bundler.close()
1569 self.ui.progress(_('bundling'), None)
1568 self.ui.progress(_('bundling'), None)
1570
1569
1571 if csets:
1570 if csets:
1572 self.hook('outgoing', node=hex(csets[0]), source=source)
1571 self.hook('outgoing', node=hex(csets[0]), source=source)
1573
1572
1574 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1573 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1575
1574
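_changegroupsubset() above streams the bundle in three phases: changeset chunks, manifest chunks, then one group per changed file, each file group prefixed by a header, with a close marker ending the stream. A simplified restatement of that shape, assuming only the bundler.fileheader()/close() calls used above (the real code additionally skips files whose pruned group turns out to be empty):

    def gengroup_shape(bundler, cset_chunks, manifest_chunks, file_groups):
        for chunk in cset_chunks:          # phase 1: changesets
            yield chunk
        for chunk in manifest_chunks:      # phase 2: manifests
            yield chunk
        for fname, chunks in file_groups:  # phase 3: per-file groups
            yield bundler.fileheader(fname)
            for chunk in chunks:
                yield chunk
        yield bundler.close()              # no more groups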
1576 def changegroup(self, basenodes, source):
1575 def changegroup(self, basenodes, source):
1577 # to avoid a race we use changegroupsubset() (issue1320)
1576 # to avoid a race we use changegroupsubset() (issue1320)
1578 return self.changegroupsubset(basenodes, self.heads(), source)
1577 return self.changegroupsubset(basenodes, self.heads(), source)
1579
1578
1580 def _changegroup(self, nodes, source):
1579 def _changegroup(self, nodes, source):
1581 """Compute the changegroup of all nodes that we have that a recipient
1580 """Compute the changegroup of all nodes that we have that a recipient
1582 doesn't. Return a chunkbuffer object whose read() method will return
1581 doesn't. Return a chunkbuffer object whose read() method will return
1583 successive changegroup chunks.
1582 successive changegroup chunks.
1584
1583
1585 This is much easier than the previous function as we can assume that
1584 This is much easier than the previous function as we can assume that
1586 the recipient has any changenode we aren't sending them.
1585 the recipient has any changenode we aren't sending them.
1587
1586
1588 nodes is the set of nodes to send"""
1587 nodes is the set of nodes to send"""
1589
1588
1590 cl = self.changelog
1589 cl = self.changelog
1591 mf = self.manifest
1590 mf = self.manifest
1592 mfs = {}
1591 mfs = {}
1593 changedfiles = set()
1592 changedfiles = set()
1594 fstate = ['']
1593 fstate = ['']
1595 count = [0]
1594 count = [0]
1596
1595
1597 self.hook('preoutgoing', throw=True, source=source)
1596 self.hook('preoutgoing', throw=True, source=source)
1598 self.changegroupinfo(nodes, source)
1597 self.changegroupinfo(nodes, source)
1599
1598
1600 revset = set([cl.rev(n) for n in nodes])
1599 revset = set([cl.rev(n) for n in nodes])
1601
1600
1602 def gennodelst(log):
1601 def gennodelst(log):
1603 for r in log:
1602 for r in log:
1604 if log.linkrev(r) in revset:
1603 if log.linkrev(r) in revset:
1605 yield log.node(r)
1604 yield log.node(r)
1606
1605
1607 def lookup(revlog, x):
1606 def lookup(revlog, x):
1608 if revlog == cl:
1607 if revlog == cl:
1609 c = cl.read(x)
1608 c = cl.read(x)
1610 changedfiles.update(c[3])
1609 changedfiles.update(c[3])
1611 mfs.setdefault(c[0], x)
1610 mfs.setdefault(c[0], x)
1612 count[0] += 1
1611 count[0] += 1
1613 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1612 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1614 return x
1613 return x
1615 elif revlog == mf:
1614 elif revlog == mf:
1616 count[0] += 1
1615 count[0] += 1
1617 self.ui.progress(_('bundling'), count[0],
1616 self.ui.progress(_('bundling'), count[0],
1618 unit=_('manifests'), total=len(mfs))
1617 unit=_('manifests'), total=len(mfs))
1619 return cl.node(revlog.linkrev(revlog.rev(x)))
1618 return cl.node(revlog.linkrev(revlog.rev(x)))
1620 else:
1619 else:
1621 self.ui.progress(
1620 self.ui.progress(
1622 _('bundling'), count[0], item=fstate[0],
1621 _('bundling'), count[0], item=fstate[0],
1623 total=len(changedfiles), unit=_('files'))
1622 total=len(changedfiles), unit=_('files'))
1624 return cl.node(revlog.linkrev(revlog.rev(x)))
1623 return cl.node(revlog.linkrev(revlog.rev(x)))
1625
1624
1626 bundler = changegroup.bundle10(lookup)
1625 bundler = changegroup.bundle10(lookup)
1627
1626
1628 def gengroup():
1627 def gengroup():
1629 '''yield a sequence of changegroup chunks (strings)'''
1628 '''yield a sequence of changegroup chunks (strings)'''
1630 # construct a list of all changed files
1629 # construct a list of all changed files
1631
1630
1632 for chunk in cl.group(nodes, bundler):
1631 for chunk in cl.group(nodes, bundler):
1633 yield chunk
1632 yield chunk
1634 self.ui.progress(_('bundling'), None)
1633 self.ui.progress(_('bundling'), None)
1635
1634
1636 count[0] = 0
1635 count[0] = 0
1637 for chunk in mf.group(gennodelst(mf), bundler):
1636 for chunk in mf.group(gennodelst(mf), bundler):
1638 yield chunk
1637 yield chunk
1639 self.ui.progress(_('bundling'), None)
1638 self.ui.progress(_('bundling'), None)
1640
1639
1641 count[0] = 0
1640 count[0] = 0
1642 for fname in sorted(changedfiles):
1641 for fname in sorted(changedfiles):
1643 filerevlog = self.file(fname)
1642 filerevlog = self.file(fname)
1644 if not len(filerevlog):
1643 if not len(filerevlog):
1645 raise util.Abort(_("empty or missing revlog for %s") % fname)
1644 raise util.Abort(_("empty or missing revlog for %s") % fname)
1646 fstate[0] = fname
1645 fstate[0] = fname
1647 first = True
1646 first = True
1648 for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
1647 for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
1649 if first:
1648 if first:
1650 if chunk == bundler.close():
1649 if chunk == bundler.close():
1651 break
1650 break
1652 count[0] += 1
1651 count[0] += 1
1653 yield bundler.fileheader(fname)
1652 yield bundler.fileheader(fname)
1654 first = False
1653 first = False
1655 yield chunk
1654 yield chunk
1656 yield bundler.close()
1655 yield bundler.close()
1657 self.ui.progress(_('bundling'), None)
1656 self.ui.progress(_('bundling'), None)
1658
1657
1659 if nodes:
1658 if nodes:
1660 self.hook('outgoing', node=hex(nodes[0]), source=source)
1659 self.hook('outgoing', node=hex(nodes[0]), source=source)
1661
1660
1662 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1661 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1663
1662
1664 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1663 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1665 """Add the changegroup returned by source.read() to this repo.
1664 """Add the changegroup returned by source.read() to this repo.
1666 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1665 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1667 the URL of the repo where this changegroup is coming from.
1666 the URL of the repo where this changegroup is coming from.
1668 If lock is not None, the function takes ownership of the lock
1667 If lock is not None, the function takes ownership of the lock
1669 and releases it after the changegroup is added.
1668 and releases it after the changegroup is added.
1670
1669
1671 Return an integer summarizing the change to this repo:
1670 Return an integer summarizing the change to this repo:
1672 - nothing changed or no source: 0
1671 - nothing changed or no source: 0
1673 - more heads than before: 1+added heads (2..n)
1672 - more heads than before: 1+added heads (2..n)
1674 - fewer heads than before: -1-removed heads (-2..-n)
1673 - fewer heads than before: -1-removed heads (-2..-n)
1675 - number of heads stays the same: 1
1674 - number of heads stays the same: 1
1676 """
1675 """
1677 def csmap(x):
1676 def csmap(x):
1678 self.ui.debug("add changeset %s\n" % short(x))
1677 self.ui.debug("add changeset %s\n" % short(x))
1679 return len(cl)
1678 return len(cl)
1680
1679
1681 def revmap(x):
1680 def revmap(x):
1682 return cl.rev(x)
1681 return cl.rev(x)
1683
1682
1684 if not source:
1683 if not source:
1685 return 0
1684 return 0
1686
1685
1687 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1686 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1688
1687
1689 changesets = files = revisions = 0
1688 changesets = files = revisions = 0
1690 efiles = set()
1689 efiles = set()
1691
1690
1692 # write changelog data to temp files so concurrent readers will not see
1691 # write changelog data to temp files so concurrent readers will not see
1693 # inconsistent view
1692 # inconsistent view
1694 cl = self.changelog
1693 cl = self.changelog
1695 cl.delayupdate()
1694 cl.delayupdate()
1696 oldheads = cl.heads()
1695 oldheads = cl.heads()
1697
1696
1698 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1697 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1699 try:
1698 try:
1700 trp = weakref.proxy(tr)
1699 trp = weakref.proxy(tr)
1701 # pull off the changeset group
1700 # pull off the changeset group
1702 self.ui.status(_("adding changesets\n"))
1701 self.ui.status(_("adding changesets\n"))
1703 clstart = len(cl)
1702 clstart = len(cl)
1704 class prog(object):
1703 class prog(object):
1705 step = _('changesets')
1704 step = _('changesets')
1706 count = 1
1705 count = 1
1707 ui = self.ui
1706 ui = self.ui
1708 total = None
1707 total = None
1709 def __call__(self):
1708 def __call__(self):
1710 self.ui.progress(self.step, self.count, unit=_('chunks'),
1709 self.ui.progress(self.step, self.count, unit=_('chunks'),
1711 total=self.total)
1710 total=self.total)
1712 self.count += 1
1711 self.count += 1
1713 pr = prog()
1712 pr = prog()
1714 source.callback = pr
1713 source.callback = pr
1715
1714
1716 if (cl.addgroup(source, csmap, trp) is None
1715 if (cl.addgroup(source, csmap, trp) is None
1717 and not emptyok):
1716 and not emptyok):
1718 raise util.Abort(_("received changelog group is empty"))
1717 raise util.Abort(_("received changelog group is empty"))
1719 clend = len(cl)
1718 clend = len(cl)
1720 changesets = clend - clstart
1719 changesets = clend - clstart
1721 for c in xrange(clstart, clend):
1720 for c in xrange(clstart, clend):
1722 efiles.update(self[c].files())
1721 efiles.update(self[c].files())
1723 efiles = len(efiles)
1722 efiles = len(efiles)
1724 self.ui.progress(_('changesets'), None)
1723 self.ui.progress(_('changesets'), None)
1725
1724
1726 # pull off the manifest group
1725 # pull off the manifest group
1727 self.ui.status(_("adding manifests\n"))
1726 self.ui.status(_("adding manifests\n"))
1728 pr.step = _('manifests')
1727 pr.step = _('manifests')
1729 pr.count = 1
1728 pr.count = 1
1730 pr.total = changesets # manifests <= changesets
1729 pr.total = changesets # manifests <= changesets
1731 # no need to check for empty manifest group here:
1730 # no need to check for empty manifest group here:
1732 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1731 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1733 # no new manifest will be created and the manifest group will
1732 # no new manifest will be created and the manifest group will
1734 # be empty during the pull
1733 # be empty during the pull
1735 self.manifest.addgroup(source, revmap, trp)
1734 self.manifest.addgroup(source, revmap, trp)
1736 self.ui.progress(_('manifests'), None)
1735 self.ui.progress(_('manifests'), None)
1737
1736
1738 needfiles = {}
1737 needfiles = {}
1739 if self.ui.configbool('server', 'validate', default=False):
1738 if self.ui.configbool('server', 'validate', default=False):
1740 # validate incoming csets have their manifests
1739 # validate incoming csets have their manifests
1741 for cset in xrange(clstart, clend):
1740 for cset in xrange(clstart, clend):
1742 mfest = self.changelog.read(self.changelog.node(cset))[0]
1741 mfest = self.changelog.read(self.changelog.node(cset))[0]
1743 mfest = self.manifest.readdelta(mfest)
1742 mfest = self.manifest.readdelta(mfest)
1744 # store file nodes we must see
1743 # store file nodes we must see
1745 for f, n in mfest.iteritems():
1744 for f, n in mfest.iteritems():
1746 needfiles.setdefault(f, set()).add(n)
1745 needfiles.setdefault(f, set()).add(n)
1747
1746
1748 # process the files
1747 # process the files
1749 self.ui.status(_("adding file changes\n"))
1748 self.ui.status(_("adding file changes\n"))
1750 pr.step = 'files'
1749 pr.step = 'files'
1751 pr.count = 1
1750 pr.count = 1
1752 pr.total = efiles
1751 pr.total = efiles
1753 source.callback = None
1752 source.callback = None
1754
1753
1755 while 1:
1754 while 1:
1756 f = source.chunk()
1755 f = source.chunk()
1757 if not f:
1756 if not f:
1758 break
1757 break
1759 self.ui.debug("adding %s revisions\n" % f)
1758 self.ui.debug("adding %s revisions\n" % f)
1760 pr()
1759 pr()
1761 fl = self.file(f)
1760 fl = self.file(f)
1762 o = len(fl)
1761 o = len(fl)
1763 if fl.addgroup(source, revmap, trp) is None:
1762 if fl.addgroup(source, revmap, trp) is None:
1764 raise util.Abort(_("received file revlog group is empty"))
1763 raise util.Abort(_("received file revlog group is empty"))
1765 revisions += len(fl) - o
1764 revisions += len(fl) - o
1766 files += 1
1765 files += 1
1767 if f in needfiles:
1766 if f in needfiles:
1768 needs = needfiles[f]
1767 needs = needfiles[f]
1769 for new in xrange(o, len(fl)):
1768 for new in xrange(o, len(fl)):
1770 n = fl.node(new)
1769 n = fl.node(new)
1771 if n in needs:
1770 if n in needs:
1772 needs.remove(n)
1771 needs.remove(n)
1773 if not needs:
1772 if not needs:
1774 del needfiles[f]
1773 del needfiles[f]
1775 self.ui.progress(_('files'), None)
1774 self.ui.progress(_('files'), None)
1776
1775
1777 for f, needs in needfiles.iteritems():
1776 for f, needs in needfiles.iteritems():
1778 fl = self.file(f)
1777 fl = self.file(f)
1779 for n in needs:
1778 for n in needs:
1780 try:
1779 try:
1781 fl.rev(n)
1780 fl.rev(n)
1782 except error.LookupError:
1781 except error.LookupError:
1783 raise util.Abort(
1782 raise util.Abort(
1784 _('missing file data for %s:%s - run hg verify') %
1783 _('missing file data for %s:%s - run hg verify') %
1785 (f, hex(n)))
1784 (f, hex(n)))
1786
1785
1787 dh = 0
1786 dh = 0
1788 if oldheads:
1787 if oldheads:
1789 heads = cl.heads()
1788 heads = cl.heads()
1790 dh = len(heads) - len(oldheads)
1789 dh = len(heads) - len(oldheads)
1791 for h in heads:
1790 for h in heads:
1792 if h not in oldheads and 'close' in self[h].extra():
1791 if h not in oldheads and 'close' in self[h].extra():
1793 dh -= 1
1792 dh -= 1
1794 htext = ""
1793 htext = ""
1795 if dh:
1794 if dh:
1796 htext = _(" (%+d heads)") % dh
1795 htext = _(" (%+d heads)") % dh
1797
1796
1798 self.ui.status(_("added %d changesets"
1797 self.ui.status(_("added %d changesets"
1799 " with %d changes to %d files%s\n")
1798 " with %d changes to %d files%s\n")
1800 % (changesets, revisions, files, htext))
1799 % (changesets, revisions, files, htext))
1801
1800
1802 if changesets > 0:
1801 if changesets > 0:
1803 p = lambda: cl.writepending() and self.root or ""
1802 p = lambda: cl.writepending() and self.root or ""
1804 self.hook('pretxnchangegroup', throw=True,
1803 self.hook('pretxnchangegroup', throw=True,
1805 node=hex(cl.node(clstart)), source=srctype,
1804 node=hex(cl.node(clstart)), source=srctype,
1806 url=url, pending=p)
1805 url=url, pending=p)
1807
1806
1808 # make changelog see real files again
1807 # make changelog see real files again
1809 cl.finalize(trp)
1808 cl.finalize(trp)
1810
1809
1811 tr.close()
1810 tr.close()
1812 finally:
1811 finally:
1813 tr.release()
1812 tr.release()
1814 if lock:
1813 if lock:
1815 lock.release()
1814 lock.release()
1816
1815
1817 if changesets > 0:
1816 if changesets > 0:
1818 # forcefully update the on-disk branch cache
1817 # forcefully update the on-disk branch cache
1819 self.ui.debug("updating the branch cache\n")
1818 self.ui.debug("updating the branch cache\n")
1820 self.updatebranchcache()
1819 self.updatebranchcache()
1821 self.hook("changegroup", node=hex(cl.node(clstart)),
1820 self.hook("changegroup", node=hex(cl.node(clstart)),
1822 source=srctype, url=url)
1821 source=srctype, url=url)
1823
1822
1824 for i in xrange(clstart, clend):
1823 for i in xrange(clstart, clend):
1825 self.hook("incoming", node=hex(cl.node(i)),
1824 self.hook("incoming", node=hex(cl.node(i)),
1826 source=srctype, url=url)
1825 source=srctype, url=url)
1827
1826
1828 # never return 0 here:
1827 # never return 0 here:
1829 if dh < 0:
1828 if dh < 0:
1830 return dh - 1
1829 return dh - 1
1831 else:
1830 else:
1832 return dh + 1
1831 return dh + 1
1833
1832
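The value returned at the end of addchangegroup() above deliberately never equals 0, so callers can tell "no source / nothing changed" (0) apart from "applied, head count unchanged" (1). A sketch of the encoding, subtracting newly closed heads the same way the code does (encode_head_change is an illustrative name):

    def encode_head_change(old_heads, new_heads, new_closed_heads=0):
        dh = len(new_heads) - len(old_heads) - new_closed_heads
        # never return 0 here
        return dh - 1 if dh < 0 else dh + 1

    # one head added:        encode_head_change(['a'], ['a', 'b']) == 2
    # head count unchanged:  encode_head_change(['a'], ['b'])      == 1
    # one head removed:      encode_head_change(['a', 'b'], ['a']) == -2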
1834 def stream_in(self, remote, requirements):
1833 def stream_in(self, remote, requirements):
1835 lock = self.lock()
1834 lock = self.lock()
1836 try:
1835 try:
1837 fp = remote.stream_out()
1836 fp = remote.stream_out()
1838 l = fp.readline()
1837 l = fp.readline()
1839 try:
1838 try:
1840 resp = int(l)
1839 resp = int(l)
1841 except ValueError:
1840 except ValueError:
1842 raise error.ResponseError(
1841 raise error.ResponseError(
1843 _('Unexpected response from remote server:'), l)
1842 _('Unexpected response from remote server:'), l)
1844 if resp == 1:
1843 if resp == 1:
1845 raise util.Abort(_('operation forbidden by server'))
1844 raise util.Abort(_('operation forbidden by server'))
1846 elif resp == 2:
1845 elif resp == 2:
1847 raise util.Abort(_('locking the remote repository failed'))
1846 raise util.Abort(_('locking the remote repository failed'))
1848 elif resp != 0:
1847 elif resp != 0:
1849 raise util.Abort(_('the server sent an unknown error code'))
1848 raise util.Abort(_('the server sent an unknown error code'))
1850 self.ui.status(_('streaming all changes\n'))
1849 self.ui.status(_('streaming all changes\n'))
1851 l = fp.readline()
1850 l = fp.readline()
1852 try:
1851 try:
1853 total_files, total_bytes = map(int, l.split(' ', 1))
1852 total_files, total_bytes = map(int, l.split(' ', 1))
1854 except (ValueError, TypeError):
1853 except (ValueError, TypeError):
1855 raise error.ResponseError(
1854 raise error.ResponseError(
1856 _('Unexpected response from remote server:'), l)
1855 _('Unexpected response from remote server:'), l)
1857 self.ui.status(_('%d files to transfer, %s of data\n') %
1856 self.ui.status(_('%d files to transfer, %s of data\n') %
1858 (total_files, util.bytecount(total_bytes)))
1857 (total_files, util.bytecount(total_bytes)))
1859 start = time.time()
1858 start = time.time()
1860 for i in xrange(total_files):
1859 for i in xrange(total_files):
1861 # XXX doesn't support '\n' or '\r' in filenames
1860 # XXX doesn't support '\n' or '\r' in filenames
1862 l = fp.readline()
1861 l = fp.readline()
1863 try:
1862 try:
1864 name, size = l.split('\0', 1)
1863 name, size = l.split('\0', 1)
1865 size = int(size)
1864 size = int(size)
1866 except (ValueError, TypeError):
1865 except (ValueError, TypeError):
1867 raise error.ResponseError(
1866 raise error.ResponseError(
1868 _('Unexpected response from remote server:'), l)
1867 _('Unexpected response from remote server:'), l)
1869 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1868 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1870 # for backwards compat, name was partially encoded
1869 # for backwards compat, name was partially encoded
1871 ofp = self.sopener(store.decodedir(name), 'w')
1870 ofp = self.sopener(store.decodedir(name), 'w')
1872 for chunk in util.filechunkiter(fp, limit=size):
1871 for chunk in util.filechunkiter(fp, limit=size):
1873 ofp.write(chunk)
1872 ofp.write(chunk)
1874 ofp.close()
1873 ofp.close()
1875 elapsed = time.time() - start
1874 elapsed = time.time() - start
1876 if elapsed <= 0:
1875 if elapsed <= 0:
1877 elapsed = 0.001
1876 elapsed = 0.001
1878 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1877 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1879 (util.bytecount(total_bytes), elapsed,
1878 (util.bytecount(total_bytes), elapsed,
1880 util.bytecount(total_bytes / elapsed)))
1879 util.bytecount(total_bytes / elapsed)))
1881
1880
1882 # new requirements = old non-format requirements + new format-related
1881 # new requirements = old non-format requirements + new format-related
1883 # requirements from the streamed-in repository
1882 # requirements from the streamed-in repository
1884 requirements.update(set(self.requirements) - self.supportedformats)
1883 requirements.update(set(self.requirements) - self.supportedformats)
1885 self._applyrequirements(requirements)
1884 self._applyrequirements(requirements)
1886 self._writerequirements()
1885 self._writerequirements()
1887
1886
1888 self.invalidate()
1887 self.invalidate()
1889 return len(self.heads()) + 1
1888 return len(self.heads()) + 1
1890 finally:
1889 finally:
1891 lock.release()
1890 lock.release()
1892
1891
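stream_in() above consumes a simple line-oriented wire format: a status line (0 ok, 1 forbidden, 2 remote lock failed), a '<total_files> <total_bytes>' summary line, then for each file a '<name>\0<size>' header line followed by exactly <size> bytes of store data. A repo-free sketch of a reader for that framing (parse_stream is an illustrative helper, not a Mercurial API):

    def parse_stream(fp):
        if int(fp.readline()) != 0:
            raise RuntimeError('server refused streaming clone')
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for _ in range(total_files):
            name, size = fp.readline().split('\0', 1)
            yield name, fp.read(int(size))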
1893 def clone(self, remote, heads=[], stream=False):
1892 def clone(self, remote, heads=[], stream=False):
1894 '''clone remote repository.
1893 '''clone remote repository.
1895
1894
1896 keyword arguments:
1895 keyword arguments:
1897 heads: list of revs to clone (forces use of pull)
1896 heads: list of revs to clone (forces use of pull)
1898 stream: use streaming clone if possible'''
1897 stream: use streaming clone if possible'''
1899
1898
1900 # now, all clients that can request uncompressed clones can
1899 # now, all clients that can request uncompressed clones can
1901 # read repo formats supported by all servers that can serve
1900 # read repo formats supported by all servers that can serve
1902 # them.
1901 # them.
1903
1902
1904 # if revlog format changes, client will have to check version
1903 # if revlog format changes, client will have to check version
1905 # and format flags on "stream" capability, and use
1904 # and format flags on "stream" capability, and use
1906 # uncompressed only if compatible.
1905 # uncompressed only if compatible.
1907
1906
1908 if stream and not heads:
1907 if stream and not heads:
1909 # 'stream' means remote revlog format is revlogv1 only
1908 # 'stream' means remote revlog format is revlogv1 only
1910 if remote.capable('stream'):
1909 if remote.capable('stream'):
1911 return self.stream_in(remote, set(('revlogv1',)))
1910 return self.stream_in(remote, set(('revlogv1',)))
1912 # otherwise, 'streamreqs' contains the remote revlog format
1911 # otherwise, 'streamreqs' contains the remote revlog format
1913 streamreqs = remote.capable('streamreqs')
1912 streamreqs = remote.capable('streamreqs')
1914 if streamreqs:
1913 if streamreqs:
1915 streamreqs = set(streamreqs.split(','))
1914 streamreqs = set(streamreqs.split(','))
1916 # if we support it, stream in and adjust our requirements
1915 # if we support it, stream in and adjust our requirements
1917 if not streamreqs - self.supportedformats:
1916 if not streamreqs - self.supportedformats:
1918 return self.stream_in(remote, streamreqs)
1917 return self.stream_in(remote, streamreqs)
1919 return self.pull(remote, heads)
1918 return self.pull(remote, heads)
1920
1919
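clone() above only attempts a streaming clone for a full clone (no explicit heads): the legacy 'stream' capability implies a plain revlogv1 remote, while 'streamreqs' advertises the remote's format requirements, all of which must be locally supported before streaming is used; otherwise it falls back to pull. A condensed sketch of that negotiation (choose_clone_strategy and its return values are illustrative):

    def choose_clone_strategy(capable, supportedformats, heads, stream):
        """capable() mirrors remote.capable(): False, True, or a string."""
        if stream and not heads:
            if capable('stream'):
                return 'stream', set(['revlogv1'])
            streamreqs = capable('streamreqs')
            if streamreqs:
                reqs = set(streamreqs.split(','))
                if not reqs - supportedformats:
                    return 'stream', reqs
        return 'pull', None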
1921 def pushkey(self, namespace, key, old, new):
1920 def pushkey(self, namespace, key, old, new):
1922 return pushkey.push(self, namespace, key, old, new)
1921 return pushkey.push(self, namespace, key, old, new)
1923
1922
1924 def listkeys(self, namespace):
1923 def listkeys(self, namespace):
1925 return pushkey.list(self, namespace)
1924 return pushkey.list(self, namespace)
1926
1925
1927 def debugwireargs(self, one, two, three=None, four=None, five=None):
1926 def debugwireargs(self, one, two, three=None, four=None, five=None):
1928 '''used to test argument passing over the wire'''
1927 '''used to test argument passing over the wire'''
1929 return "%s %s %s %s %s" % (one, two, three, four, five)
1928 return "%s %s %s %s %s" % (one, two, three, four, five)
1930
1929
1931 # used to avoid circular references so destructors work
1930 # used to avoid circular references so destructors work
1932 def aftertrans(files):
1931 def aftertrans(files):
1933 renamefiles = [tuple(t) for t in files]
1932 renamefiles = [tuple(t) for t in files]
1934 def a():
1933 def a():
1935 for src, dest in renamefiles:
1934 for src, dest in renamefiles:
1936 util.rename(src, dest)
1935 util.rename(src, dest)
1937 return a
1936 return a
1938
1937
1939 def instance(ui, path, create):
1938 def instance(ui, path, create):
1940 return localrepository(ui, urlmod.localpath(path), create)
1939 return localrepository(ui, util.localpath(path), create)
1941
1940
1942 def islocal(path):
1941 def islocal(path):
1943 return True
1942 return True
@@ -1,214 +1,214 b''
1 # sshrepo.py - ssh repository proxy class for mercurial
1 # sshrepo.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, wireproto, url
9 import util, error, wireproto
10
10
11 class remotelock(object):
11 class remotelock(object):
12 def __init__(self, repo):
12 def __init__(self, repo):
13 self.repo = repo
13 self.repo = repo
14 def release(self):
14 def release(self):
15 self.repo.unlock()
15 self.repo.unlock()
16 self.repo = None
16 self.repo = None
17 def __del__(self):
17 def __del__(self):
18 if self.repo:
18 if self.repo:
19 self.release()
19 self.release()
20
20
21 class sshrepository(wireproto.wirerepository):
21 class sshrepository(wireproto.wirerepository):
22 def __init__(self, ui, path, create=0):
22 def __init__(self, ui, path, create=0):
23 self._url = path
23 self._url = path
24 self.ui = ui
24 self.ui = ui
25
25
26 u = url.url(path, parsequery=False, parsefragment=False)
26 u = util.url(path, parsequery=False, parsefragment=False)
27 if u.scheme != 'ssh' or not u.host or u.path is None:
27 if u.scheme != 'ssh' or not u.host or u.path is None:
28 self._abort(error.RepoError(_("couldn't parse location %s") % path))
28 self._abort(error.RepoError(_("couldn't parse location %s") % path))
29
29
30 self.user = u.user
30 self.user = u.user
31 if u.passwd is not None:
31 if u.passwd is not None:
32 self._abort(error.RepoError(_("password in URL not supported")))
32 self._abort(error.RepoError(_("password in URL not supported")))
33 self.host = u.host
33 self.host = u.host
34 self.port = u.port
34 self.port = u.port
35 self.path = u.path or "."
35 self.path = u.path or "."
36
36
37 sshcmd = self.ui.config("ui", "ssh", "ssh")
37 sshcmd = self.ui.config("ui", "ssh", "ssh")
38 remotecmd = self.ui.config("ui", "remotecmd", "hg")
38 remotecmd = self.ui.config("ui", "remotecmd", "hg")
39
39
40 args = util.sshargs(sshcmd, self.host, self.user, self.port)
40 args = util.sshargs(sshcmd, self.host, self.user, self.port)
41
41
42 if create:
42 if create:
43 cmd = '%s %s "%s init %s"'
43 cmd = '%s %s "%s init %s"'
44 cmd = cmd % (sshcmd, args, remotecmd, self.path)
44 cmd = cmd % (sshcmd, args, remotecmd, self.path)
45
45
46 ui.note(_('running %s\n') % cmd)
46 ui.note(_('running %s\n') % cmd)
47 res = util.system(cmd)
47 res = util.system(cmd)
48 if res != 0:
48 if res != 0:
49 self._abort(error.RepoError(_("could not create remote repo")))
49 self._abort(error.RepoError(_("could not create remote repo")))
50
50
51 self.validate_repo(ui, sshcmd, args, remotecmd)
51 self.validate_repo(ui, sshcmd, args, remotecmd)
52
52
53 def url(self):
53 def url(self):
54 return self._url
54 return self._url
55
55
56 def validate_repo(self, ui, sshcmd, args, remotecmd):
56 def validate_repo(self, ui, sshcmd, args, remotecmd):
57 # clean up previous run
57 # clean up previous run
58 self.cleanup()
58 self.cleanup()
59
59
60 cmd = '%s %s "%s -R %s serve --stdio"'
60 cmd = '%s %s "%s -R %s serve --stdio"'
61 cmd = cmd % (sshcmd, args, remotecmd, self.path)
61 cmd = cmd % (sshcmd, args, remotecmd, self.path)
62
62
63 cmd = util.quotecommand(cmd)
63 cmd = util.quotecommand(cmd)
64 ui.note(_('running %s\n') % cmd)
64 ui.note(_('running %s\n') % cmd)
65 self.pipeo, self.pipei, self.pipee = util.popen3(cmd)
65 self.pipeo, self.pipei, self.pipee = util.popen3(cmd)
66
66
67 # skip any noise generated by remote shell
67 # skip any noise generated by remote shell
68 self._callstream("hello")
68 self._callstream("hello")
69 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
69 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
70 lines = ["", "dummy"]
70 lines = ["", "dummy"]
71 max_noise = 500
71 max_noise = 500
72 while lines[-1] and max_noise:
72 while lines[-1] and max_noise:
73 l = r.readline()
73 l = r.readline()
74 self.readerr()
74 self.readerr()
75 if lines[-1] == "1\n" and l == "\n":
75 if lines[-1] == "1\n" and l == "\n":
76 break
76 break
77 if l:
77 if l:
78 ui.debug("remote: ", l)
78 ui.debug("remote: ", l)
79 lines.append(l)
79 lines.append(l)
80 max_noise -= 1
80 max_noise -= 1
81 else:
81 else:
82 self._abort(error.RepoError(_("no suitable response from remote hg")))
82 self._abort(error.RepoError(_("no suitable response from remote hg")))
83
83
84 self.capabilities = set()
84 self.capabilities = set()
85 for l in reversed(lines):
85 for l in reversed(lines):
86 if l.startswith("capabilities:"):
86 if l.startswith("capabilities:"):
87 self.capabilities.update(l[:-1].split(":")[1].split())
87 self.capabilities.update(l[:-1].split(":")[1].split())
88 break
88 break
89
89
90 def readerr(self):
90 def readerr(self):
91 while 1:
91 while 1:
92 size = util.fstat(self.pipee).st_size
92 size = util.fstat(self.pipee).st_size
93 if size == 0:
93 if size == 0:
94 break
94 break
95 s = self.pipee.read(size)
95 s = self.pipee.read(size)
96 if not s:
96 if not s:
97 break
97 break
98 for l in s.splitlines():
98 for l in s.splitlines():
99 self.ui.status(_("remote: "), l, '\n')
99 self.ui.status(_("remote: "), l, '\n')
100
100
101 def _abort(self, exception):
101 def _abort(self, exception):
102 self.cleanup()
102 self.cleanup()
103 raise exception
103 raise exception
104
104
105 def cleanup(self):
105 def cleanup(self):
106 try:
106 try:
107 self.pipeo.close()
107 self.pipeo.close()
108 self.pipei.close()
108 self.pipei.close()
109 # read the error descriptor until EOF
109 # read the error descriptor until EOF
110 for l in self.pipee:
110 for l in self.pipee:
111 self.ui.status(_("remote: "), l)
111 self.ui.status(_("remote: "), l)
112 self.pipee.close()
112 self.pipee.close()
113 except:
113 except:
114 pass
114 pass
115
115
116 __del__ = cleanup
116 __del__ = cleanup
117
117
118 def _callstream(self, cmd, **args):
118 def _callstream(self, cmd, **args):
119 self.ui.debug("sending %s command\n" % cmd)
119 self.ui.debug("sending %s command\n" % cmd)
120 self.pipeo.write("%s\n" % cmd)
120 self.pipeo.write("%s\n" % cmd)
121 _func, names = wireproto.commands[cmd]
121 _func, names = wireproto.commands[cmd]
122 keys = names.split()
122 keys = names.split()
123 wireargs = {}
123 wireargs = {}
124 for k in keys:
124 for k in keys:
125 if k == '*':
125 if k == '*':
126 wireargs['*'] = args
126 wireargs['*'] = args
127 break
127 break
128 else:
128 else:
129 wireargs[k] = args[k]
129 wireargs[k] = args[k]
130 del args[k]
130 del args[k]
131 for k, v in sorted(wireargs.iteritems()):
131 for k, v in sorted(wireargs.iteritems()):
132 self.pipeo.write("%s %d\n" % (k, len(v)))
132 self.pipeo.write("%s %d\n" % (k, len(v)))
133 if isinstance(v, dict):
133 if isinstance(v, dict):
134 for dk, dv in v.iteritems():
134 for dk, dv in v.iteritems():
135 self.pipeo.write("%s %d\n" % (dk, len(dv)))
135 self.pipeo.write("%s %d\n" % (dk, len(dv)))
136 self.pipeo.write(dv)
136 self.pipeo.write(dv)
137 else:
137 else:
138 self.pipeo.write(v)
138 self.pipeo.write(v)
139 self.pipeo.flush()
139 self.pipeo.flush()
140
140
141 return self.pipei
141 return self.pipei
142
142
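_callstream() above frames each command for the ssh pipe as the command name on its own line, then one '<key> <length>' line per argument followed by the raw value; dict-valued arguments ('*') are sent as nested key/length pairs. A sketch of that framing for plain string arguments (encode_command is an illustrative helper, not part of sshrepo):

    def encode_command(cmd, **args):
        out = ['%s\n' % cmd]
        for k, v in sorted(args.items()):
            out.append('%s %d\n' % (k, len(v)))  # key and value length
            out.append(v)                        # raw value bytes
        return ''.join(out)

    # encode_command('between', pairs='%s-%s' % ('0' * 40, '0' * 40))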
143 def _call(self, cmd, **args):
143 def _call(self, cmd, **args):
144 self._callstream(cmd, **args)
144 self._callstream(cmd, **args)
145 return self._recv()
145 return self._recv()
146
146
147 def _callpush(self, cmd, fp, **args):
147 def _callpush(self, cmd, fp, **args):
148 r = self._call(cmd, **args)
148 r = self._call(cmd, **args)
149 if r:
149 if r:
150 return '', r
150 return '', r
151 while 1:
151 while 1:
152 d = fp.read(4096)
152 d = fp.read(4096)
153 if not d:
153 if not d:
154 break
154 break
155 self._send(d)
155 self._send(d)
156 self._send("", flush=True)
156 self._send("", flush=True)
157 r = self._recv()
157 r = self._recv()
158 if r:
158 if r:
159 return '', r
159 return '', r
160 return self._recv(), ''
160 return self._recv(), ''
161
161
162 def _decompress(self, stream):
162 def _decompress(self, stream):
163 return stream
163 return stream
164
164
165 def _recv(self):
165 def _recv(self):
166 l = self.pipei.readline()
166 l = self.pipei.readline()
167 self.readerr()
167 self.readerr()
168 try:
168 try:
169 l = int(l)
169 l = int(l)
170 except ValueError:
170 except ValueError:
171 self._abort(error.ResponseError(_("unexpected response:"), l))
171 self._abort(error.ResponseError(_("unexpected response:"), l))
172 return self.pipei.read(l)
172 return self.pipei.read(l)
173
173
174 def _send(self, data, flush=False):
174 def _send(self, data, flush=False):
175 self.pipeo.write("%d\n" % len(data))
175 self.pipeo.write("%d\n" % len(data))
176 if data:
176 if data:
177 self.pipeo.write(data)
177 self.pipeo.write(data)
178 if flush:
178 if flush:
179 self.pipeo.flush()
179 self.pipeo.flush()
180 self.readerr()
180 self.readerr()
181
181
182 def lock(self):
182 def lock(self):
183 self._call("lock")
183 self._call("lock")
184 return remotelock(self)
184 return remotelock(self)
185
185
186 def unlock(self):
186 def unlock(self):
187 self._call("unlock")
187 self._call("unlock")
188
188
189 def addchangegroup(self, cg, source, url):
189 def addchangegroup(self, cg, source, url):
190 '''Send a changegroup to the remote server. Return an integer
190 '''Send a changegroup to the remote server. Return an integer
191 similar to unbundle(). DEPRECATED, since it requires locking the
191 similar to unbundle(). DEPRECATED, since it requires locking the
192 remote.'''
192 remote.'''
193 d = self._call("addchangegroup")
193 d = self._call("addchangegroup")
194 if d:
194 if d:
195 self._abort(error.RepoError(_("push refused: %s") % d))
195 self._abort(error.RepoError(_("push refused: %s") % d))
196 while 1:
196 while 1:
197 d = cg.read(4096)
197 d = cg.read(4096)
198 if not d:
198 if not d:
199 break
199 break
200 self.pipeo.write(d)
200 self.pipeo.write(d)
201 self.readerr()
201 self.readerr()
202
202
203 self.pipeo.flush()
203 self.pipeo.flush()
204
204
205 self.readerr()
205 self.readerr()
206 r = self._recv()
206 r = self._recv()
207 if not r:
207 if not r:
208 return 1
208 return 1
209 try:
209 try:
210 return int(r)
210 return int(r)
211 except ValueError:
211 except ValueError:
212 self._abort(error.ResponseError(_("unexpected response:"), r))
212 self._abort(error.ResponseError(_("unexpected response:"), r))
213
213
214 instance = sshrepository
214 instance = sshrepository
@@ -1,147 +1,147 b''
1 # statichttprepo.py - simple http repository class for mercurial
1 # statichttprepo.py - simple http repository class for mercurial
2 #
2 #
3 # This provides read-only repo access to repositories exported via static http
3 # This provides read-only repo access to repositories exported via static http
4 #
4 #
5 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from i18n import _
10 from i18n import _
11 import changelog, byterange, url, error
11 import changelog, byterange, url, error
12 import localrepo, manifest, util, store
12 import localrepo, manifest, util, store
13 import urllib, urllib2, errno
13 import urllib, urllib2, errno
14
14
15 class httprangereader(object):
15 class httprangereader(object):
16 def __init__(self, url, opener):
16 def __init__(self, url, opener):
17 # we assume opener has HTTPRangeHandler
17 # we assume opener has HTTPRangeHandler
18 self.url = url
18 self.url = url
19 self.pos = 0
19 self.pos = 0
20 self.opener = opener
20 self.opener = opener
21 self.name = url
21 self.name = url
22 def seek(self, pos):
22 def seek(self, pos):
23 self.pos = pos
23 self.pos = pos
24 def read(self, bytes=None):
24 def read(self, bytes=None):
25 req = urllib2.Request(self.url)
25 req = urllib2.Request(self.url)
26 end = ''
26 end = ''
27 if bytes:
27 if bytes:
28 end = self.pos + bytes - 1
28 end = self.pos + bytes - 1
29 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
29 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
30
30
31 try:
31 try:
32 f = self.opener.open(req)
32 f = self.opener.open(req)
33 data = f.read()
33 data = f.read()
34 if hasattr(f, 'getcode'):
34 if hasattr(f, 'getcode'):
35 # python 2.6+
35 # python 2.6+
36 code = f.getcode()
36 code = f.getcode()
37 elif hasattr(f, 'code'):
37 elif hasattr(f, 'code'):
38 # undocumented attribute, seems to be set in 2.4 and 2.5
38 # undocumented attribute, seems to be set in 2.4 and 2.5
39 code = f.code
39 code = f.code
40 else:
40 else:
41 # Don't know how to check, hope for the best.
41 # Don't know how to check, hope for the best.
42 code = 206
42 code = 206
43 except urllib2.HTTPError, inst:
43 except urllib2.HTTPError, inst:
44 num = inst.code == 404 and errno.ENOENT or None
44 num = inst.code == 404 and errno.ENOENT or None
45 raise IOError(num, inst)
45 raise IOError(num, inst)
46 except urllib2.URLError, inst:
46 except urllib2.URLError, inst:
47 raise IOError(None, inst.reason[1])
47 raise IOError(None, inst.reason[1])
48
48
49 if code == 200:
49 if code == 200:
50 # HTTPRangeHandler does nothing if remote does not support
50 # HTTPRangeHandler does nothing if remote does not support
51 # Range headers and returns the full entity. Let's slice it.
51 # Range headers and returns the full entity. Let's slice it.
52 if bytes:
52 if bytes:
53 data = data[self.pos:self.pos + bytes]
53 data = data[self.pos:self.pos + bytes]
54 else:
54 else:
55 data = data[self.pos:]
55 data = data[self.pos:]
56 elif bytes:
56 elif bytes:
57 data = data[:bytes]
57 data = data[:bytes]
58 self.pos += len(data)
58 self.pos += len(data)
59 return data
59 return data
60 def __iter__(self):
60 def __iter__(self):
61 return iter(self.read().splitlines(1))
61 return iter(self.read().splitlines(1))
62 def close(self):
62 def close(self):
63 pass
63 pass
64
64
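httprangereader.read() above asks for bytes [pos, pos + bytes - 1] via a standard HTTP Range header and, when the server ignores the header and answers 200 with the full entity, slices the response locally instead. The header construction in isolation (range_header is an illustrative name):

    def range_header(pos, nbytes=None):
        end = '' if nbytes is None else pos + nbytes - 1
        return 'bytes=%d-%s' % (pos, end)

    # range_header(100, 10) == 'bytes=100-109'   (bounded read)
    # range_header(100)     == 'bytes=100-'      (read to end of file)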
65 def build_opener(ui, authinfo):
65 def build_opener(ui, authinfo):
66 # urllib cannot handle URLs with embedded user or passwd
66 # urllib cannot handle URLs with embedded user or passwd
67 urlopener = url.opener(ui, authinfo)
67 urlopener = url.opener(ui, authinfo)
68 urlopener.add_handler(byterange.HTTPRangeHandler())
68 urlopener.add_handler(byterange.HTTPRangeHandler())
69
69
70 def opener(base):
70 def opener(base):
71 """return a function that opens files over http"""
71 """return a function that opens files over http"""
72 p = base
72 p = base
73 def o(path, mode="r", atomictemp=None):
73 def o(path, mode="r", atomictemp=None):
74 if mode not in ('r', 'rb'):
74 if mode not in ('r', 'rb'):
75 raise IOError('Permission denied')
75 raise IOError('Permission denied')
76 f = "/".join((p, urllib.quote(path)))
76 f = "/".join((p, urllib.quote(path)))
77 return httprangereader(f, urlopener)
77 return httprangereader(f, urlopener)
78 return o
78 return o
79
79
80 return opener
80 return opener
81
81
82 class statichttprepository(localrepo.localrepository):
82 class statichttprepository(localrepo.localrepository):
83 def __init__(self, ui, path):
83 def __init__(self, ui, path):
84 self._url = path
84 self._url = path
85 self.ui = ui
85 self.ui = ui
86
86
87 self.root = path
87 self.root = path
88 u = url.url(path.rstrip('/') + "/.hg")
88 u = util.url(path.rstrip('/') + "/.hg")
89 self.path, authinfo = u.authinfo()
89 self.path, authinfo = u.authinfo()
90
90
91 opener = build_opener(ui, authinfo)
91 opener = build_opener(ui, authinfo)
92 self.opener = opener(self.path)
92 self.opener = opener(self.path)
93
93
94 # find requirements
94 # find requirements
95 try:
95 try:
96 requirements = self.opener("requires").read().splitlines()
96 requirements = self.opener("requires").read().splitlines()
97 except IOError, inst:
97 except IOError, inst:
98 if inst.errno != errno.ENOENT:
98 if inst.errno != errno.ENOENT:
99 raise
99 raise
100 # check if it is a non-empty old-style repository
100 # check if it is a non-empty old-style repository
101 try:
101 try:
102 fp = self.opener("00changelog.i")
102 fp = self.opener("00changelog.i")
103 fp.read(1)
103 fp.read(1)
104 fp.close()
104 fp.close()
105 except IOError, inst:
105 except IOError, inst:
106 if inst.errno != errno.ENOENT:
106 if inst.errno != errno.ENOENT:
107 raise
107 raise
108 # we do not care about empty old-style repositories here
108 # we do not care about empty old-style repositories here
109 msg = _("'%s' does not appear to be an hg repository") % path
109 msg = _("'%s' does not appear to be an hg repository") % path
110 raise error.RepoError(msg)
110 raise error.RepoError(msg)
111 requirements = []
111 requirements = []
112
112
113 # check them
113 # check them
114 for r in requirements:
114 for r in requirements:
115 if r not in self.supported:
115 if r not in self.supported:
116 raise error.RequirementError(
116 raise error.RequirementError(
117 _("requirement '%s' not supported") % r)
117 _("requirement '%s' not supported") % r)
118
118
119 # setup store
119 # setup store
120 self.store = store.store(requirements, self.path, opener)
120 self.store = store.store(requirements, self.path, opener)
121 self.spath = self.store.path
121 self.spath = self.store.path
122 self.sopener = self.store.opener
122 self.sopener = self.store.opener
123 self.sjoin = self.store.join
123 self.sjoin = self.store.join
124
124
125 self.manifest = manifest.manifest(self.sopener)
125 self.manifest = manifest.manifest(self.sopener)
126 self.changelog = changelog.changelog(self.sopener)
126 self.changelog = changelog.changelog(self.sopener)
127 self._tags = None
127 self._tags = None
128 self.nodetagscache = None
128 self.nodetagscache = None
129 self._branchcache = None
129 self._branchcache = None
130 self._branchcachetip = None
130 self._branchcachetip = None
131 self.encodepats = None
131 self.encodepats = None
132 self.decodepats = None
132 self.decodepats = None
133 self.capabilities = self.capabilities.difference(["pushkey"])
133 self.capabilities = self.capabilities.difference(["pushkey"])
134
134
135 def url(self):
135 def url(self):
136 return self._url
136 return self._url
137
137
138 def local(self):
138 def local(self):
139 return False
139 return False
140
140
141 def lock(self, wait=True):
141 def lock(self, wait=True):
142 raise util.Abort(_('cannot lock static-http repository'))
142 raise util.Abort(_('cannot lock static-http repository'))
143
143
144 def instance(ui, path, create):
144 def instance(ui, path, create):
145 if create:
145 if create:
146 raise util.Abort(_('cannot create new static-http repository'))
146 raise util.Abort(_('cannot create new static-http repository'))
147 return statichttprepository(ui, path[7:])
147 return statichttprepository(ui, path[7:])
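statichttprepository.__init__ above refuses any repository whose requires file lists an entry the client does not support. The gate in isolation, with a generic exception standing in for error.RequirementError:

    def check_requirements(requirements, supported):
        for r in requirements:
            if r not in supported:
                raise RuntimeError("requirement '%s' not supported" % r)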
@@ -1,1045 +1,1045 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import errno, os, re, xml.dom.minidom, shutil, posixpath
8 import errno, os, re, xml.dom.minidom, shutil, posixpath
9 import stat, subprocess, tarfile
9 import stat, subprocess, tarfile
10 from i18n import _
10 from i18n import _
11 import config, scmutil, util, node, error, cmdutil, url, bookmarks
11 import config, scmutil, util, node, error, cmdutil, bookmarks
12 hg = None
12 hg = None
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 nullstate = ('', '', 'empty')
15 nullstate = ('', '', 'empty')
16
16
17 def state(ctx, ui):
17 def state(ctx, ui):
18 """return a state dict, mapping subrepo paths configured in .hgsub
18 """return a state dict, mapping subrepo paths configured in .hgsub
19 to tuple: (source from .hgsub, revision from .hgsubstate, kind
19 to tuple: (source from .hgsub, revision from .hgsubstate, kind
20 (key in types dict))
20 (key in types dict))
21 """
21 """
22 p = config.config()
22 p = config.config()
23 def read(f, sections=None, remap=None):
23 def read(f, sections=None, remap=None):
24 if f in ctx:
24 if f in ctx:
25 try:
25 try:
26 data = ctx[f].data()
26 data = ctx[f].data()
27 except IOError, err:
27 except IOError, err:
28 if err.errno != errno.ENOENT:
28 if err.errno != errno.ENOENT:
29 raise
29 raise
30 # handle missing subrepo spec files as removed
30 # handle missing subrepo spec files as removed
31 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
31 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
32 return
32 return
33 p.parse(f, data, sections, remap, read)
33 p.parse(f, data, sections, remap, read)
34 else:
34 else:
35 raise util.Abort(_("subrepo spec file %s not found") % f)
35 raise util.Abort(_("subrepo spec file %s not found") % f)
36
36
37 if '.hgsub' in ctx:
37 if '.hgsub' in ctx:
38 read('.hgsub')
38 read('.hgsub')
39
39
40 for path, src in ui.configitems('subpaths'):
40 for path, src in ui.configitems('subpaths'):
41 p.set('subpaths', path, src, ui.configsource('subpaths', path))
41 p.set('subpaths', path, src, ui.configsource('subpaths', path))
42
42
43 rev = {}
43 rev = {}
44 if '.hgsubstate' in ctx:
44 if '.hgsubstate' in ctx:
45 try:
45 try:
46 for l in ctx['.hgsubstate'].data().splitlines():
46 for l in ctx['.hgsubstate'].data().splitlines():
47 revision, path = l.split(" ", 1)
47 revision, path = l.split(" ", 1)
48 rev[path] = revision
48 rev[path] = revision
49 except IOError, err:
49 except IOError, err:
50 if err.errno != errno.ENOENT:
50 if err.errno != errno.ENOENT:
51 raise
51 raise
52
52
53 state = {}
53 state = {}
54 for path, src in p[''].items():
54 for path, src in p[''].items():
55 kind = 'hg'
55 kind = 'hg'
56 if src.startswith('['):
56 if src.startswith('['):
57 if ']' not in src:
57 if ']' not in src:
58 raise util.Abort(_('missing ] in subrepo source'))
58 raise util.Abort(_('missing ] in subrepo source'))
59 kind, src = src.split(']', 1)
59 kind, src = src.split(']', 1)
60 kind = kind[1:]
60 kind = kind[1:]
61
61
62 for pattern, repl in p.items('subpaths'):
62 for pattern, repl in p.items('subpaths'):
63 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
63 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
64 # does a string decode.
64 # does a string decode.
65 repl = repl.encode('string-escape')
65 repl = repl.encode('string-escape')
66 # However, we still want to allow back references to go
66 # However, we still want to allow back references to go
67 # through unharmed, so we turn r'\\1' into r'\1'. Again,
67 # through unharmed, so we turn r'\\1' into r'\1'. Again,
68 # extra escapes are needed because re.sub string decodes.
68 # extra escapes are needed because re.sub string decodes.
69 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
69 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
70 try:
70 try:
71 src = re.sub(pattern, repl, src, 1)
71 src = re.sub(pattern, repl, src, 1)
72 except re.error, e:
72 except re.error, e:
73 raise util.Abort(_("bad subrepository pattern in %s: %s")
73 raise util.Abort(_("bad subrepository pattern in %s: %s")
74 % (p.source('subpaths', pattern), e))
74 % (p.source('subpaths', pattern), e))
75
75
76 state[path] = (src.strip(), rev.get(path, ''), kind)
76 state[path] = (src.strip(), rev.get(path, ''), kind)
77
77
78 return state
78 return state
79
79
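state() above accepts sources of the form '[kind]source' in .hgsub, defaulting the kind to 'hg' when no prefix is given. The prefix handling restated as a standalone helper (split_kind and the example URL are illustrative):

    def split_kind(src):
        kind = 'hg'
        if src.startswith('['):
            if ']' not in src:
                raise ValueError('missing ] in subrepo source')
            kind, src = src.split(']', 1)
            kind = kind[1:]
        return kind, src.strip()

    # split_kind('[git]https://example.com/repo.git') == ('git', 'https://example.com/repo.git')
    # split_kind('../nested')                         == ('hg', '../nested')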
80 def writestate(repo, state):
80 def writestate(repo, state):
81 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
81 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
82 repo.wwrite('.hgsubstate',
82 repo.wwrite('.hgsubstate',
83 ''.join(['%s %s\n' % (state[s][1], s)
83 ''.join(['%s %s\n' % (state[s][1], s)
84 for s in sorted(state)]), '')
84 for s in sorted(state)]), '')
85
85
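writestate() above serialises the state dict into .hgsubstate as one '<revision> <path>' line per subrepo, sorted by path, which is exactly the format state() parses back. The rendering in isolation:

    def render_hgsubstate(state):
        # state maps path -> (source, revision, kind); only revision and
        # path are written out
        return ''.join('%s %s\n' % (state[s][1], s) for s in sorted(state))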
86 def submerge(repo, wctx, mctx, actx, overwrite):
86 def submerge(repo, wctx, mctx, actx, overwrite):
87 """delegated from merge.applyupdates: merging of .hgsubstate file
87 """delegated from merge.applyupdates: merging of .hgsubstate file
88 in working context, merging context and ancestor context"""
88 in working context, merging context and ancestor context"""
89 if mctx == actx: # backwards?
89 if mctx == actx: # backwards?
90 actx = wctx.p1()
90 actx = wctx.p1()
91 s1 = wctx.substate
91 s1 = wctx.substate
92 s2 = mctx.substate
92 s2 = mctx.substate
93 sa = actx.substate
93 sa = actx.substate
94 sm = {}
94 sm = {}
95
95
96 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
96 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
97
97
98 def debug(s, msg, r=""):
98 def debug(s, msg, r=""):
99 if r:
99 if r:
100 r = "%s:%s:%s" % r
100 r = "%s:%s:%s" % r
101 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
101 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
102
102
103 for s, l in s1.items():
103 for s, l in s1.items():
104 a = sa.get(s, nullstate)
104 a = sa.get(s, nullstate)
105 ld = l # local state with possible dirty flag for compares
105 ld = l # local state with possible dirty flag for compares
106 if wctx.sub(s).dirty():
106 if wctx.sub(s).dirty():
107 ld = (l[0], l[1] + "+")
107 ld = (l[0], l[1] + "+")
108 if wctx == actx: # overwrite
108 if wctx == actx: # overwrite
109 a = ld
109 a = ld
110
110
111 if s in s2:
111 if s in s2:
112 r = s2[s]
112 r = s2[s]
113 if ld == r or r == a: # no change or local is newer
113 if ld == r or r == a: # no change or local is newer
114 sm[s] = l
114 sm[s] = l
115 continue
115 continue
116 elif ld == a: # other side changed
116 elif ld == a: # other side changed
117 debug(s, "other changed, get", r)
117 debug(s, "other changed, get", r)
118 wctx.sub(s).get(r, overwrite)
118 wctx.sub(s).get(r, overwrite)
119 sm[s] = r
119 sm[s] = r
120 elif ld[0] != r[0]: # sources differ
120 elif ld[0] != r[0]: # sources differ
121 if repo.ui.promptchoice(
121 if repo.ui.promptchoice(
122 _(' subrepository sources for %s differ\n'
122 _(' subrepository sources for %s differ\n'
123 'use (l)ocal source (%s) or (r)emote source (%s)?')
123 'use (l)ocal source (%s) or (r)emote source (%s)?')
124 % (s, l[0], r[0]),
124 % (s, l[0], r[0]),
125 (_('&Local'), _('&Remote')), 0):
125 (_('&Local'), _('&Remote')), 0):
126 debug(s, "prompt changed, get", r)
126 debug(s, "prompt changed, get", r)
127 wctx.sub(s).get(r, overwrite)
127 wctx.sub(s).get(r, overwrite)
128 sm[s] = r
128 sm[s] = r
129 elif ld[1] == a[1]: # local side is unchanged
129 elif ld[1] == a[1]: # local side is unchanged
130 debug(s, "other side changed, get", r)
130 debug(s, "other side changed, get", r)
131 wctx.sub(s).get(r, overwrite)
131 wctx.sub(s).get(r, overwrite)
132 sm[s] = r
132 sm[s] = r
133 else:
133 else:
134 debug(s, "both sides changed, merge with", r)
134 debug(s, "both sides changed, merge with", r)
135 wctx.sub(s).merge(r)
135 wctx.sub(s).merge(r)
136 sm[s] = l
136 sm[s] = l
137 elif ld == a: # remote removed, local unchanged
137 elif ld == a: # remote removed, local unchanged
138 debug(s, "remote removed, remove")
138 debug(s, "remote removed, remove")
139 wctx.sub(s).remove()
139 wctx.sub(s).remove()
140 else:
140 else:
141 if repo.ui.promptchoice(
141 if repo.ui.promptchoice(
142 _(' local changed subrepository %s which remote removed\n'
142 _(' local changed subrepository %s which remote removed\n'
143 'use (c)hanged version or (d)elete?') % s,
143 'use (c)hanged version or (d)elete?') % s,
144 (_('&Changed'), _('&Delete')), 0):
144 (_('&Changed'), _('&Delete')), 0):
145 debug(s, "prompt remove")
145 debug(s, "prompt remove")
146 wctx.sub(s).remove()
146 wctx.sub(s).remove()
147
147
148 for s, r in sorted(s2.items()):
148 for s, r in sorted(s2.items()):
149 if s in s1:
149 if s in s1:
150 continue
150 continue
151 elif s not in sa:
151 elif s not in sa:
152 debug(s, "remote added, get", r)
152 debug(s, "remote added, get", r)
153 mctx.sub(s).get(r)
153 mctx.sub(s).get(r)
154 sm[s] = r
154 sm[s] = r
155 elif r != sa[s]:
155 elif r != sa[s]:
156 if repo.ui.promptchoice(
156 if repo.ui.promptchoice(
157 _(' remote changed subrepository %s which local removed\n'
157 _(' remote changed subrepository %s which local removed\n'
158 'use (c)hanged version or (d)elete?') % s,
158 'use (c)hanged version or (d)elete?') % s,
159 (_('&Changed'), _('&Delete')), 0) == 0:
159 (_('&Changed'), _('&Delete')), 0) == 0:
160 debug(s, "prompt recreate", r)
160 debug(s, "prompt recreate", r)
161 wctx.sub(s).get(r)
161 wctx.sub(s).get(r)
162 sm[s] = r
162 sm[s] = r
163
163
164 # record merged .hgsubstate
164 # record merged .hgsubstate
165 writestate(repo, sm)
165 writestate(repo, sm)
166
166
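# The loop above boils down to a per-subrepo three-way decision between the
# local (l), remote (r) and ancestor (a) states. A toy restatement for the
# common case where the path exists on both sides; the tuples are simplified
# (source, revision) pairs rather than full substate entries:
def _decide(l, r, a):
    if l == r or r == a:
        return 'keep local'            # no change, or only local changed
    if l == a:
        return 'get remote'            # only the other side changed
    if l[0] != r[0]:
        return 'prompt: sources differ'
    if l[1] == a[1]:
        return 'get remote'            # local revision is unchanged
    return 'merge'                     # both sides changed

# _decide(('src', '1'), ('src', '2'), ('src', '1'))  ->  'get remote'
# _decide(('src', '2'), ('src', '3'), ('src', '1'))  ->  'merge'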
167 def _updateprompt(ui, sub, dirty, local, remote):
167 def _updateprompt(ui, sub, dirty, local, remote):
168 if dirty:
168 if dirty:
169 msg = (_(' subrepository sources for %s differ\n'
169 msg = (_(' subrepository sources for %s differ\n'
170 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
170 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
171 % (subrelpath(sub), local, remote))
171 % (subrelpath(sub), local, remote))
172 else:
172 else:
173 msg = (_(' subrepository sources for %s differ (in checked out version)\n'
173 msg = (_(' subrepository sources for %s differ (in checked out version)\n'
174 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
174 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
175 % (subrelpath(sub), local, remote))
175 % (subrelpath(sub), local, remote))
176 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
176 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
177
177
178 def reporelpath(repo):
178 def reporelpath(repo):
179 """return path to this (sub)repo as seen from outermost repo"""
179 """return path to this (sub)repo as seen from outermost repo"""
180 parent = repo
180 parent = repo
181 while hasattr(parent, '_subparent'):
181 while hasattr(parent, '_subparent'):
182 parent = parent._subparent
182 parent = parent._subparent
183 return repo.root[len(parent.root)+1:]
183 return repo.root[len(parent.root)+1:]
184
184
185 def subrelpath(sub):
185 def subrelpath(sub):
186 """return path to this subrepo as seen from outermost repo"""
186 """return path to this subrepo as seen from outermost repo"""
187 if hasattr(sub, '_relpath'):
187 if hasattr(sub, '_relpath'):
188 return sub._relpath
188 return sub._relpath
189 if not hasattr(sub, '_repo'):
189 if not hasattr(sub, '_repo'):
190 return sub._path
190 return sub._path
191 return reporelpath(sub._repo)
191 return reporelpath(sub._repo)
192
192
193 def _abssource(repo, push=False, abort=True):
193 def _abssource(repo, push=False, abort=True):
194 """return pull/push path of repo - either based on parent repo .hgsub info
194 """return pull/push path of repo - either based on parent repo .hgsub info
195 or on the top repo config. Abort or return None if no source found."""
195 or on the top repo config. Abort or return None if no source found."""
196 if hasattr(repo, '_subparent'):
196 if hasattr(repo, '_subparent'):
197 source = url.url(repo._subsource)
197 source = util.url(repo._subsource)
198 source.path = posixpath.normpath(source.path)
198 source.path = posixpath.normpath(source.path)
199 if posixpath.isabs(source.path) or source.scheme:
199 if posixpath.isabs(source.path) or source.scheme:
200 return str(source)
200 return str(source)
201 parent = _abssource(repo._subparent, push, abort=False)
201 parent = _abssource(repo._subparent, push, abort=False)
202 if parent:
202 if parent:
203 parent = url.url(parent)
203 parent = util.url(parent)
204 parent.path = posixpath.join(parent.path, source.path)
204 parent.path = posixpath.join(parent.path, source.path)
205 parent.path = posixpath.normpath(parent.path)
205 parent.path = posixpath.normpath(parent.path)
206 return str(parent)
206 return str(parent)
207 else: # recursion reached top repo
207 else: # recursion reached top repo
208 if hasattr(repo, '_subtoppath'):
208 if hasattr(repo, '_subtoppath'):
209 return repo._subtoppath
209 return repo._subtoppath
210 if push and repo.ui.config('paths', 'default-push'):
210 if push and repo.ui.config('paths', 'default-push'):
211 return repo.ui.config('paths', 'default-push')
211 return repo.ui.config('paths', 'default-push')
212 if repo.ui.config('paths', 'default'):
212 if repo.ui.config('paths', 'default'):
213 return repo.ui.config('paths', 'default')
213 return repo.ui.config('paths', 'default')
214 if abort:
214 if abort:
215 raise util.Abort(_("default path for subrepository %s not found") %
215 raise util.Abort(_("default path for subrepository %s not found") %
216 reporelpath(repo))
216 reporelpath(repo))
217
217
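# _abssource above walks up the chain of parent repositories, joining each
# relative subrepo source onto the parent's pull/push URL with posixpath.
# A standalone sketch of that join step, using urlparse instead of
# Mercurial's util.url and made-up URLs:
import posixpath
from urlparse import urlsplit, urlunsplit

def join_source(parent_url, subsource):
    if '://' in subsource or posixpath.isabs(subsource):
        return subsource                     # already absolute, use as-is
    scheme, netloc, path, query, frag = urlsplit(parent_url)
    path = posixpath.normpath(posixpath.join(path, subsource))
    return urlunsplit((scheme, netloc, path, query, frag))

# join_source('http://hg.example.org/outer', 'libs/inner')
#   ->  'http://hg.example.org/outer/libs/inner'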
218 def itersubrepos(ctx1, ctx2):
218 def itersubrepos(ctx1, ctx2):
219 """find subrepos in ctx1 or ctx2"""
219 """find subrepos in ctx1 or ctx2"""
220 # Create a (subpath, ctx) mapping where we prefer subpaths from
220 # Create a (subpath, ctx) mapping where we prefer subpaths from
221 # ctx1. The subpaths from ctx2 are important when the .hgsub file
221 # ctx1. The subpaths from ctx2 are important when the .hgsub file
222 # has been modified (in ctx2) but not yet committed (in ctx1).
222 # has been modified (in ctx2) but not yet committed (in ctx1).
223 subpaths = dict.fromkeys(ctx2.substate, ctx2)
223 subpaths = dict.fromkeys(ctx2.substate, ctx2)
224 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
224 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
225 for subpath, ctx in sorted(subpaths.iteritems()):
225 for subpath, ctx in sorted(subpaths.iteritems()):
226 yield subpath, ctx.sub(subpath)
226 yield subpath, ctx.sub(subpath)
227
227
228 def subrepo(ctx, path):
228 def subrepo(ctx, path):
229 """return instance of the right subrepo class for subrepo in path"""
229 """return instance of the right subrepo class for subrepo in path"""
230 # subrepo inherently violates our import layering rules
230 # subrepo inherently violates our import layering rules
231 # because it wants to make repo objects from deep inside the stack
231 # because it wants to make repo objects from deep inside the stack
232 # so we manually delay the circular imports to not break
232 # so we manually delay the circular imports to not break
233 # scripts that don't use our demand-loading
233 # scripts that don't use our demand-loading
234 global hg
234 global hg
235 import hg as h
235 import hg as h
236 hg = h
236 hg = h
237
237
238 scmutil.path_auditor(ctx._repo.root)(path)
238 scmutil.path_auditor(ctx._repo.root)(path)
239 state = ctx.substate.get(path, nullstate)
239 state = ctx.substate.get(path, nullstate)
240 if state[2] not in types:
240 if state[2] not in types:
241 raise util.Abort(_('unknown subrepo type %s') % state[2])
241 raise util.Abort(_('unknown subrepo type %s') % state[2])
242 return types[state[2]](ctx, path, state[:2])
242 return types[state[2]](ctx, path, state[:2])
243
243
244 # subrepo classes need to implement the following abstract class:
244 # subrepo classes need to implement the following abstract class:
245
245
246 class abstractsubrepo(object):
246 class abstractsubrepo(object):
247
247
248 def dirty(self, ignoreupdate=False):
248 def dirty(self, ignoreupdate=False):
249 """returns true if the dirstate of the subrepo is dirty or does not
249 """returns true if the dirstate of the subrepo is dirty or does not
250 match current stored state. If ignoreupdate is true, only check
250 match current stored state. If ignoreupdate is true, only check
251 whether the subrepo has uncommitted changes in its dirstate.
251 whether the subrepo has uncommitted changes in its dirstate.
252 """
252 """
253 raise NotImplementedError
253 raise NotImplementedError
254
254
255 def checknested(self, path):
255 def checknested(self, path):
256 """check if path is a subrepository within this repository"""
256 """check if path is a subrepository within this repository"""
257 return False
257 return False
258
258
259 def commit(self, text, user, date):
259 def commit(self, text, user, date):
260 """commit the current changes to the subrepo with the given
260 """commit the current changes to the subrepo with the given
261 log message. Use given user and date if possible. Return the
261 log message. Use given user and date if possible. Return the
262 new state of the subrepo.
262 new state of the subrepo.
263 """
263 """
264 raise NotImplementedError
264 raise NotImplementedError
265
265
266 def remove(self):
266 def remove(self):
267 """remove the subrepo
267 """remove the subrepo
268
268
269 (should verify the dirstate is not dirty first)
269 (should verify the dirstate is not dirty first)
270 """
270 """
271 raise NotImplementedError
271 raise NotImplementedError
272
272
273 def get(self, state, overwrite=False):
273 def get(self, state, overwrite=False):
274 """run whatever commands are needed to put the subrepo into
274 """run whatever commands are needed to put the subrepo into
275 this state
275 this state
276 """
276 """
277 raise NotImplementedError
277 raise NotImplementedError
278
278
279 def merge(self, state):
279 def merge(self, state):
280 """merge currently-saved state with the new state."""
280 """merge currently-saved state with the new state."""
281 raise NotImplementedError
281 raise NotImplementedError
282
282
283 def push(self, force):
283 def push(self, force):
284 """perform whatever action is analogous to 'hg push'
284 """perform whatever action is analogous to 'hg push'
285
285
286 This may be a no-op on some systems.
286 This may be a no-op on some systems.
287 """
287 """
288 raise NotImplementedError
288 raise NotImplementedError
289
289
290 def add(self, ui, match, dryrun, prefix):
290 def add(self, ui, match, dryrun, prefix):
291 return []
291 return []
292
292
293 def status(self, rev2, **opts):
293 def status(self, rev2, **opts):
294 return [], [], [], [], [], [], []
294 return [], [], [], [], [], [], []
295
295
296 def diff(self, diffopts, node2, match, prefix, **opts):
296 def diff(self, diffopts, node2, match, prefix, **opts):
297 pass
297 pass
298
298
299 def outgoing(self, ui, dest, opts):
299 def outgoing(self, ui, dest, opts):
300 return 1
300 return 1
301
301
302 def incoming(self, ui, source, opts):
302 def incoming(self, ui, source, opts):
303 return 1
303 return 1
304
304
305 def files(self):
305 def files(self):
306 """return filename iterator"""
306 """return filename iterator"""
307 raise NotImplementedError
307 raise NotImplementedError
308
308
309 def filedata(self, name):
309 def filedata(self, name):
310 """return file data"""
310 """return file data"""
311 raise NotImplementedError
311 raise NotImplementedError
312
312
313 def fileflags(self, name):
313 def fileflags(self, name):
314 """return file flags"""
314 """return file flags"""
315 return ''
315 return ''
316
316
317 def archive(self, ui, archiver, prefix):
317 def archive(self, ui, archiver, prefix):
318 files = self.files()
318 files = self.files()
319 total = len(files)
319 total = len(files)
320 relpath = subrelpath(self)
320 relpath = subrelpath(self)
321 ui.progress(_('archiving (%s)') % relpath, 0,
321 ui.progress(_('archiving (%s)') % relpath, 0,
322 unit=_('files'), total=total)
322 unit=_('files'), total=total)
323 for i, name in enumerate(files):
323 for i, name in enumerate(files):
324 flags = self.fileflags(name)
324 flags = self.fileflags(name)
325 mode = 'x' in flags and 0755 or 0644
325 mode = 'x' in flags and 0755 or 0644
326 symlink = 'l' in flags
326 symlink = 'l' in flags
327 archiver.addfile(os.path.join(prefix, self._path, name),
327 archiver.addfile(os.path.join(prefix, self._path, name),
328 mode, symlink, self.filedata(name))
328 mode, symlink, self.filedata(name))
329 ui.progress(_('archiving (%s)') % relpath, i + 1,
329 ui.progress(_('archiving (%s)') % relpath, i + 1,
330 unit=_('files'), total=total)
330 unit=_('files'), total=total)
331 ui.progress(_('archiving (%s)') % relpath, None)
331 ui.progress(_('archiving (%s)') % relpath, None)
332
332
333
333
334 class hgsubrepo(abstractsubrepo):
334 class hgsubrepo(abstractsubrepo):
335 def __init__(self, ctx, path, state):
335 def __init__(self, ctx, path, state):
336 self._path = path
336 self._path = path
337 self._state = state
337 self._state = state
338 r = ctx._repo
338 r = ctx._repo
339 root = r.wjoin(path)
339 root = r.wjoin(path)
340 create = False
340 create = False
341 if not os.path.exists(os.path.join(root, '.hg')):
341 if not os.path.exists(os.path.join(root, '.hg')):
342 create = True
342 create = True
343 util.makedirs(root)
343 util.makedirs(root)
344 self._repo = hg.repository(r.ui, root, create=create)
344 self._repo = hg.repository(r.ui, root, create=create)
345 self._repo._subparent = r
345 self._repo._subparent = r
346 self._repo._subsource = state[0]
346 self._repo._subsource = state[0]
347
347
348 if create:
348 if create:
349 fp = self._repo.opener("hgrc", "w", text=True)
349 fp = self._repo.opener("hgrc", "w", text=True)
350 fp.write('[paths]\n')
350 fp.write('[paths]\n')
351
351
352 def addpathconfig(key, value):
352 def addpathconfig(key, value):
353 if value:
353 if value:
354 fp.write('%s = %s\n' % (key, value))
354 fp.write('%s = %s\n' % (key, value))
355 self._repo.ui.setconfig('paths', key, value)
355 self._repo.ui.setconfig('paths', key, value)
356
356
357 defpath = _abssource(self._repo, abort=False)
357 defpath = _abssource(self._repo, abort=False)
358 defpushpath = _abssource(self._repo, True, abort=False)
358 defpushpath = _abssource(self._repo, True, abort=False)
359 addpathconfig('default', defpath)
359 addpathconfig('default', defpath)
360 if defpath != defpushpath:
360 if defpath != defpushpath:
361 addpathconfig('default-push', defpushpath)
361 addpathconfig('default-push', defpushpath)
362 fp.close()
362 fp.close()
363
363
364 def add(self, ui, match, dryrun, prefix):
364 def add(self, ui, match, dryrun, prefix):
365 return cmdutil.add(ui, self._repo, match, dryrun, True,
365 return cmdutil.add(ui, self._repo, match, dryrun, True,
366 os.path.join(prefix, self._path))
366 os.path.join(prefix, self._path))
367
367
368 def status(self, rev2, **opts):
368 def status(self, rev2, **opts):
369 try:
369 try:
370 rev1 = self._state[1]
370 rev1 = self._state[1]
371 ctx1 = self._repo[rev1]
371 ctx1 = self._repo[rev1]
372 ctx2 = self._repo[rev2]
372 ctx2 = self._repo[rev2]
373 return self._repo.status(ctx1, ctx2, **opts)
373 return self._repo.status(ctx1, ctx2, **opts)
374 except error.RepoLookupError, inst:
374 except error.RepoLookupError, inst:
375 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
375 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
376 % (inst, subrelpath(self)))
376 % (inst, subrelpath(self)))
377 return [], [], [], [], [], [], []
377 return [], [], [], [], [], [], []
378
378
379 def diff(self, diffopts, node2, match, prefix, **opts):
379 def diff(self, diffopts, node2, match, prefix, **opts):
380 try:
380 try:
381 node1 = node.bin(self._state[1])
381 node1 = node.bin(self._state[1])
382 # We currently expect node2 to come from substate and be
382 # We currently expect node2 to come from substate and be
383 # in hex format
383 # in hex format
384 if node2 is not None:
384 if node2 is not None:
385 node2 = node.bin(node2)
385 node2 = node.bin(node2)
386 cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts,
386 cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts,
387 node1, node2, match,
387 node1, node2, match,
388 prefix=os.path.join(prefix, self._path),
388 prefix=os.path.join(prefix, self._path),
389 listsubrepos=True, **opts)
389 listsubrepos=True, **opts)
390 except error.RepoLookupError, inst:
390 except error.RepoLookupError, inst:
391 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
391 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
392 % (inst, subrelpath(self)))
392 % (inst, subrelpath(self)))
393
393
394 def archive(self, ui, archiver, prefix):
394 def archive(self, ui, archiver, prefix):
395 abstractsubrepo.archive(self, ui, archiver, prefix)
395 abstractsubrepo.archive(self, ui, archiver, prefix)
396
396
397 rev = self._state[1]
397 rev = self._state[1]
398 ctx = self._repo[rev]
398 ctx = self._repo[rev]
399 for subpath in ctx.substate:
399 for subpath in ctx.substate:
400 s = subrepo(ctx, subpath)
400 s = subrepo(ctx, subpath)
401 s.archive(ui, archiver, os.path.join(prefix, self._path))
401 s.archive(ui, archiver, os.path.join(prefix, self._path))
402
402
403 def dirty(self, ignoreupdate=False):
403 def dirty(self, ignoreupdate=False):
404 r = self._state[1]
404 r = self._state[1]
405 if r == '' and not ignoreupdate: # no state recorded
405 if r == '' and not ignoreupdate: # no state recorded
406 return True
406 return True
407 w = self._repo[None]
407 w = self._repo[None]
408 if w.p1() != self._repo[r] and not ignoreupdate:
408 if w.p1() != self._repo[r] and not ignoreupdate:
409 # different version checked out
409 # different version checked out
410 return True
410 return True
411 return w.dirty() # working directory changed
411 return w.dirty() # working directory changed
412
412
413 def checknested(self, path):
413 def checknested(self, path):
414 return self._repo._checknested(self._repo.wjoin(path))
414 return self._repo._checknested(self._repo.wjoin(path))
415
415
416 def commit(self, text, user, date):
416 def commit(self, text, user, date):
417 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
417 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
418 n = self._repo.commit(text, user, date)
418 n = self._repo.commit(text, user, date)
419 if not n:
419 if not n:
420 return self._repo['.'].hex() # different version checked out
420 return self._repo['.'].hex() # different version checked out
421 return node.hex(n)
421 return node.hex(n)
422
422
423 def remove(self):
423 def remove(self):
424 # we can't fully delete the repository as it may contain
424 # we can't fully delete the repository as it may contain
425 # local-only history
425 # local-only history
426 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
426 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
427 hg.clean(self._repo, node.nullid, False)
427 hg.clean(self._repo, node.nullid, False)
428
428
429 def _get(self, state):
429 def _get(self, state):
430 source, revision, kind = state
430 source, revision, kind = state
431 if revision not in self._repo:
431 if revision not in self._repo:
432 self._repo._subsource = source
432 self._repo._subsource = source
433 srcurl = _abssource(self._repo)
433 srcurl = _abssource(self._repo)
434 self._repo.ui.status(_('pulling subrepo %s from %s\n')
434 self._repo.ui.status(_('pulling subrepo %s from %s\n')
435 % (subrelpath(self), srcurl))
435 % (subrelpath(self), srcurl))
436 other = hg.repository(self._repo.ui, srcurl)
436 other = hg.repository(self._repo.ui, srcurl)
437 self._repo.pull(other)
437 self._repo.pull(other)
438 bookmarks.updatefromremote(self._repo.ui, self._repo, other)
438 bookmarks.updatefromremote(self._repo.ui, self._repo, other)
439
439
440 def get(self, state, overwrite=False):
440 def get(self, state, overwrite=False):
441 self._get(state)
441 self._get(state)
442 source, revision, kind = state
442 source, revision, kind = state
443 self._repo.ui.debug("getting subrepo %s\n" % self._path)
443 self._repo.ui.debug("getting subrepo %s\n" % self._path)
444 hg.clean(self._repo, revision, False)
444 hg.clean(self._repo, revision, False)
445
445
446 def merge(self, state):
446 def merge(self, state):
447 self._get(state)
447 self._get(state)
448 cur = self._repo['.']
448 cur = self._repo['.']
449 dst = self._repo[state[1]]
449 dst = self._repo[state[1]]
450 anc = dst.ancestor(cur)
450 anc = dst.ancestor(cur)
451
451
452 def mergefunc():
452 def mergefunc():
453 if anc == cur:
453 if anc == cur:
454 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
454 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
455 hg.update(self._repo, state[1])
455 hg.update(self._repo, state[1])
456 elif anc == dst:
456 elif anc == dst:
457 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
457 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
458 else:
458 else:
459 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
459 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
460 hg.merge(self._repo, state[1], remind=False)
460 hg.merge(self._repo, state[1], remind=False)
461
461
462 wctx = self._repo[None]
462 wctx = self._repo[None]
463 if self.dirty():
463 if self.dirty():
464 if anc != dst:
464 if anc != dst:
465 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
465 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
466 mergefunc()
466 mergefunc()
467 else:
467 else:
468 mergefunc()
468 mergefunc()
469 else:
469 else:
470 mergefunc()
470 mergefunc()
471
471
472 def push(self, force):
472 def push(self, force):
473 # push subrepos depth-first for coherent ordering
473 # push subrepos depth-first for coherent ordering
474 c = self._repo['']
474 c = self._repo['']
475 subs = c.substate # only repos that are committed
475 subs = c.substate # only repos that are committed
476 for s in sorted(subs):
476 for s in sorted(subs):
477 if not c.sub(s).push(force):
477 if not c.sub(s).push(force):
478 return False
478 return False
479
479
480 dsturl = _abssource(self._repo, True)
480 dsturl = _abssource(self._repo, True)
481 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
481 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
482 (subrelpath(self), dsturl))
482 (subrelpath(self), dsturl))
483 other = hg.repository(self._repo.ui, dsturl)
483 other = hg.repository(self._repo.ui, dsturl)
484 return self._repo.push(other, force)
484 return self._repo.push(other, force)
485
485
486 def outgoing(self, ui, dest, opts):
486 def outgoing(self, ui, dest, opts):
487 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
487 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
488
488
489 def incoming(self, ui, source, opts):
489 def incoming(self, ui, source, opts):
490 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
490 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
491
491
492 def files(self):
492 def files(self):
493 rev = self._state[1]
493 rev = self._state[1]
494 ctx = self._repo[rev]
494 ctx = self._repo[rev]
495 return ctx.manifest()
495 return ctx.manifest()
496
496
497 def filedata(self, name):
497 def filedata(self, name):
498 rev = self._state[1]
498 rev = self._state[1]
499 return self._repo[rev][name].data()
499 return self._repo[rev][name].data()
500
500
501 def fileflags(self, name):
501 def fileflags(self, name):
502 rev = self._state[1]
502 rev = self._state[1]
503 ctx = self._repo[rev]
503 ctx = self._repo[rev]
504 return ctx.flags(name)
504 return ctx.flags(name)
505
505
506
506
507 class svnsubrepo(abstractsubrepo):
507 class svnsubrepo(abstractsubrepo):
508 def __init__(self, ctx, path, state):
508 def __init__(self, ctx, path, state):
509 self._path = path
509 self._path = path
510 self._state = state
510 self._state = state
511 self._ctx = ctx
511 self._ctx = ctx
512 self._ui = ctx._repo.ui
512 self._ui = ctx._repo.ui
513
513
514 def _svncommand(self, commands, filename=''):
514 def _svncommand(self, commands, filename=''):
515 cmd = ['svn']
515 cmd = ['svn']
516 # Starting in svn 1.5 --non-interactive is a global flag
516 # Starting in svn 1.5 --non-interactive is a global flag
517 # instead of being per-command, but we need to support 1.4 so
517 # instead of being per-command, but we need to support 1.4 so
518 # we have to be intelligent about what commands take
518 # we have to be intelligent about what commands take
519 # --non-interactive.
519 # --non-interactive.
520 if (not self._ui.interactive() and
520 if (not self._ui.interactive() and
521 commands[0] in ('update', 'checkout', 'commit')):
521 commands[0] in ('update', 'checkout', 'commit')):
522 cmd.append('--non-interactive')
522 cmd.append('--non-interactive')
523 cmd.extend(commands)
523 cmd.extend(commands)
524 if filename is not None:
524 if filename is not None:
525 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
525 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
526 cmd.append(path)
526 cmd.append(path)
527 env = dict(os.environ)
527 env = dict(os.environ)
528 # Avoid localized output, preserve current locale for everything else.
528 # Avoid localized output, preserve current locale for everything else.
529 env['LC_MESSAGES'] = 'C'
529 env['LC_MESSAGES'] = 'C'
530 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
530 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
531 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
531 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
532 universal_newlines=True, env=env)
532 universal_newlines=True, env=env)
533 stdout, stderr = p.communicate()
533 stdout, stderr = p.communicate()
534 stderr = stderr.strip()
534 stderr = stderr.strip()
535 if stderr:
535 if stderr:
536 raise util.Abort(stderr)
536 raise util.Abort(stderr)
537 return stdout
537 return stdout
538
538
539 @propertycache
539 @propertycache
540 def _svnversion(self):
540 def _svnversion(self):
541 output = self._svncommand(['--version'], filename=None)
541 output = self._svncommand(['--version'], filename=None)
542 m = re.search(r'^svn,\s+version\s+(\d+)\.(\d+)', output)
542 m = re.search(r'^svn,\s+version\s+(\d+)\.(\d+)', output)
543 if not m:
543 if not m:
544 raise util.Abort(_('cannot retrieve svn tool version'))
544 raise util.Abort(_('cannot retrieve svn tool version'))
545 return (int(m.group(1)), int(m.group(2)))
545 return (int(m.group(1)), int(m.group(2)))
546
546
547 def _wcrevs(self):
547 def _wcrevs(self):
548 # Get the working directory revision as well as the last
548 # Get the working directory revision as well as the last
549 # commit revision so we can compare the subrepo state with
549 # commit revision so we can compare the subrepo state with
550 # both. We used to store the working directory one.
550 # both. We used to store the working directory one.
551 output = self._svncommand(['info', '--xml'])
551 output = self._svncommand(['info', '--xml'])
552 doc = xml.dom.minidom.parseString(output)
552 doc = xml.dom.minidom.parseString(output)
553 entries = doc.getElementsByTagName('entry')
553 entries = doc.getElementsByTagName('entry')
554 lastrev, rev = '0', '0'
554 lastrev, rev = '0', '0'
555 if entries:
555 if entries:
556 rev = str(entries[0].getAttribute('revision')) or '0'
556 rev = str(entries[0].getAttribute('revision')) or '0'
557 commits = entries[0].getElementsByTagName('commit')
557 commits = entries[0].getElementsByTagName('commit')
558 if commits:
558 if commits:
559 lastrev = str(commits[0].getAttribute('revision')) or '0'
559 lastrev = str(commits[0].getAttribute('revision')) or '0'
560 return (lastrev, rev)
560 return (lastrev, rev)
561
561
562 def _wcrev(self):
562 def _wcrev(self):
563 return self._wcrevs()[0]
563 return self._wcrevs()[0]
564
564
565 def _wcchanged(self):
565 def _wcchanged(self):
566 """Return (changes, extchanges) where changes is True
566 """Return (changes, extchanges) where changes is True
567 if the working directory was changed, and extchanges is
567 if the working directory was changed, and extchanges is
568 True if any of these changes concern an external entry.
568 True if any of these changes concern an external entry.
569 """
569 """
570 output = self._svncommand(['status', '--xml'])
570 output = self._svncommand(['status', '--xml'])
571 externals, changes = [], []
571 externals, changes = [], []
572 doc = xml.dom.minidom.parseString(output)
572 doc = xml.dom.minidom.parseString(output)
573 for e in doc.getElementsByTagName('entry'):
573 for e in doc.getElementsByTagName('entry'):
574 s = e.getElementsByTagName('wc-status')
574 s = e.getElementsByTagName('wc-status')
575 if not s:
575 if not s:
576 continue
576 continue
577 item = s[0].getAttribute('item')
577 item = s[0].getAttribute('item')
578 props = s[0].getAttribute('props')
578 props = s[0].getAttribute('props')
579 path = e.getAttribute('path')
579 path = e.getAttribute('path')
580 if item == 'external':
580 if item == 'external':
581 externals.append(path)
581 externals.append(path)
582 if (item not in ('', 'normal', 'unversioned', 'external')
582 if (item not in ('', 'normal', 'unversioned', 'external')
583 or props not in ('', 'none')):
583 or props not in ('', 'none')):
584 changes.append(path)
584 changes.append(path)
585 for path in changes:
585 for path in changes:
586 for ext in externals:
586 for ext in externals:
587 if path == ext or path.startswith(ext + os.sep):
587 if path == ext or path.startswith(ext + os.sep):
588 return True, True
588 return True, True
589 return bool(changes), False
589 return bool(changes), False
590
590
591 def dirty(self, ignoreupdate=False):
591 def dirty(self, ignoreupdate=False):
592 if not self._wcchanged()[0]:
592 if not self._wcchanged()[0]:
593 if self._state[1] in self._wcrevs() or ignoreupdate:
593 if self._state[1] in self._wcrevs() or ignoreupdate:
594 return False
594 return False
595 return True
595 return True
596
596
597 def commit(self, text, user, date):
597 def commit(self, text, user, date):
598 # user and date are out of our hands since svn is centralized
598 # user and date are out of our hands since svn is centralized
599 changed, extchanged = self._wcchanged()
599 changed, extchanged = self._wcchanged()
600 if not changed:
600 if not changed:
601 return self._wcrev()
601 return self._wcrev()
602 if extchanged:
602 if extchanged:
603 # Do not try to commit externals
603 # Do not try to commit externals
604 raise util.Abort(_('cannot commit svn externals'))
604 raise util.Abort(_('cannot commit svn externals'))
605 commitinfo = self._svncommand(['commit', '-m', text])
605 commitinfo = self._svncommand(['commit', '-m', text])
606 self._ui.status(commitinfo)
606 self._ui.status(commitinfo)
607 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
607 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
608 if not newrev:
608 if not newrev:
609 raise util.Abort(commitinfo.splitlines()[-1])
609 raise util.Abort(commitinfo.splitlines()[-1])
610 newrev = newrev.groups()[0]
610 newrev = newrev.groups()[0]
611 self._ui.status(self._svncommand(['update', '-r', newrev]))
611 self._ui.status(self._svncommand(['update', '-r', newrev]))
612 return newrev
612 return newrev
613
613
614 def remove(self):
614 def remove(self):
615 if self.dirty():
615 if self.dirty():
616 self._ui.warn(_('not removing repo %s because '
616 self._ui.warn(_('not removing repo %s because '
617 'it has changes.\n') % self._path)
617 'it has changes.\n') % self._path)
618 return
618 return
619 self._ui.note(_('removing subrepo %s\n') % self._path)
619 self._ui.note(_('removing subrepo %s\n') % self._path)
620
620
621 def onerror(function, path, excinfo):
621 def onerror(function, path, excinfo):
622 if function is not os.remove:
622 if function is not os.remove:
623 raise
623 raise
624 # read-only files cannot be unlinked under Windows
624 # read-only files cannot be unlinked under Windows
625 s = os.stat(path)
625 s = os.stat(path)
626 if (s.st_mode & stat.S_IWRITE) != 0:
626 if (s.st_mode & stat.S_IWRITE) != 0:
627 raise
627 raise
628 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
628 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
629 os.remove(path)
629 os.remove(path)
630
630
631 path = self._ctx._repo.wjoin(self._path)
631 path = self._ctx._repo.wjoin(self._path)
632 shutil.rmtree(path, onerror=onerror)
632 shutil.rmtree(path, onerror=onerror)
633 try:
633 try:
634 os.removedirs(os.path.dirname(path))
634 os.removedirs(os.path.dirname(path))
635 except OSError:
635 except OSError:
636 pass
636 pass
637
637
638 def get(self, state, overwrite=False):
638 def get(self, state, overwrite=False):
639 if overwrite:
639 if overwrite:
640 self._svncommand(['revert', '--recursive'])
640 self._svncommand(['revert', '--recursive'])
641 args = ['checkout']
641 args = ['checkout']
642 if self._svnversion >= (1, 5):
642 if self._svnversion >= (1, 5):
643 args.append('--force')
643 args.append('--force')
644 args.extend([state[0], '--revision', state[1]])
644 args.extend([state[0], '--revision', state[1]])
645 status = self._svncommand(args)
645 status = self._svncommand(args)
646 if not re.search('Checked out revision [0-9]+.', status):
646 if not re.search('Checked out revision [0-9]+.', status):
647 raise util.Abort(status.splitlines()[-1])
647 raise util.Abort(status.splitlines()[-1])
648 self._ui.status(status)
648 self._ui.status(status)
649
649
650 def merge(self, state):
650 def merge(self, state):
651 old = self._state[1]
651 old = self._state[1]
652 new = state[1]
652 new = state[1]
653 if new != self._wcrev():
653 if new != self._wcrev():
654 dirty = old == self._wcrev() or self._wcchanged()[0]
654 dirty = old == self._wcrev() or self._wcchanged()[0]
655 if _updateprompt(self._ui, self, dirty, self._wcrev(), new):
655 if _updateprompt(self._ui, self, dirty, self._wcrev(), new):
656 self.get(state, False)
656 self.get(state, False)
657
657
658 def push(self, force):
658 def push(self, force):
659 # push is a no-op for SVN
659 # push is a no-op for SVN
660 return True
660 return True
661
661
662 def files(self):
662 def files(self):
663 output = self._svncommand(['list'])
663 output = self._svncommand(['list'])
664 # This works because svn forbids \n in filenames.
664 # This works because svn forbids \n in filenames.
665 return output.splitlines()
665 return output.splitlines()
666
666
667 def filedata(self, name):
667 def filedata(self, name):
668 return self._svncommand(['cat'], name)
668 return self._svncommand(['cat'], name)
669
669
670
670
671 class gitsubrepo(abstractsubrepo):
671 class gitsubrepo(abstractsubrepo):
672 def __init__(self, ctx, path, state):
672 def __init__(self, ctx, path, state):
673 # TODO add git version check.
673 # TODO add git version check.
674 self._state = state
674 self._state = state
675 self._ctx = ctx
675 self._ctx = ctx
676 self._path = path
676 self._path = path
677 self._relpath = os.path.join(reporelpath(ctx._repo), path)
677 self._relpath = os.path.join(reporelpath(ctx._repo), path)
678 self._abspath = ctx._repo.wjoin(path)
678 self._abspath = ctx._repo.wjoin(path)
679 self._subparent = ctx._repo
679 self._subparent = ctx._repo
680 self._ui = ctx._repo.ui
680 self._ui = ctx._repo.ui
681
681
682 def _gitcommand(self, commands, env=None, stream=False):
682 def _gitcommand(self, commands, env=None, stream=False):
683 return self._gitdir(commands, env=env, stream=stream)[0]
683 return self._gitdir(commands, env=env, stream=stream)[0]
684
684
685 def _gitdir(self, commands, env=None, stream=False):
685 def _gitdir(self, commands, env=None, stream=False):
686 return self._gitnodir(commands, env=env, stream=stream,
686 return self._gitnodir(commands, env=env, stream=stream,
687 cwd=self._abspath)
687 cwd=self._abspath)
688
688
689 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
689 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
690 """Calls the git command
690 """Calls the git command
691
691
692 The method tries to call the git command. Versions prior to 1.6.0
692 The method tries to call the git command. Versions prior to 1.6.0
693 are not supported and will very probably fail.
693 are not supported and will very probably fail.
694 """
694 """
695 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
695 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
696 # unless ui.quiet is set, print git's stderr,
696 # unless ui.quiet is set, print git's stderr,
697 # which is mostly progress and useful info
697 # which is mostly progress and useful info
698 errpipe = None
698 errpipe = None
699 if self._ui.quiet:
699 if self._ui.quiet:
700 errpipe = open(os.devnull, 'w')
700 errpipe = open(os.devnull, 'w')
701 p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env,
701 p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env,
702 close_fds=util.closefds,
702 close_fds=util.closefds,
703 stdout=subprocess.PIPE, stderr=errpipe)
703 stdout=subprocess.PIPE, stderr=errpipe)
704 if stream:
704 if stream:
705 return p.stdout, None
705 return p.stdout, None
706
706
707 retdata = p.stdout.read().strip()
707 retdata = p.stdout.read().strip()
708 # wait for the child to exit to avoid a race condition.
708 # wait for the child to exit to avoid a race condition.
709 p.wait()
709 p.wait()
710
710
711 if p.returncode != 0 and p.returncode != 1:
711 if p.returncode != 0 and p.returncode != 1:
712 # there are certain error codes that are ok
712 # there are certain error codes that are ok
713 command = commands[0]
713 command = commands[0]
714 if command in ('cat-file', 'symbolic-ref'):
714 if command in ('cat-file', 'symbolic-ref'):
715 return retdata, p.returncode
715 return retdata, p.returncode
716 # for all others, abort
716 # for all others, abort
717 raise util.Abort('git %s error %d in %s' %
717 raise util.Abort('git %s error %d in %s' %
718 (command, p.returncode, self._relpath))
718 (command, p.returncode, self._relpath))
719
719
720 return retdata, p.returncode
720 return retdata, p.returncode
721
721
722 def _gitmissing(self):
722 def _gitmissing(self):
723 return not os.path.exists(os.path.join(self._abspath, '.git'))
723 return not os.path.exists(os.path.join(self._abspath, '.git'))
724
724
725 def _gitstate(self):
725 def _gitstate(self):
726 return self._gitcommand(['rev-parse', 'HEAD'])
726 return self._gitcommand(['rev-parse', 'HEAD'])
727
727
728 def _gitcurrentbranch(self):
728 def _gitcurrentbranch(self):
729 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
729 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
730 if err:
730 if err:
731 current = None
731 current = None
732 return current
732 return current
733
733
734 def _gitremote(self, remote):
734 def _gitremote(self, remote):
735 out = self._gitcommand(['remote', 'show', '-n', remote])
735 out = self._gitcommand(['remote', 'show', '-n', remote])
736 line = out.split('\n')[1]
736 line = out.split('\n')[1]
737 i = line.index('URL: ') + len('URL: ')
737 i = line.index('URL: ') + len('URL: ')
738 return line[i:]
738 return line[i:]
739
739
740 def _githavelocally(self, revision):
740 def _githavelocally(self, revision):
741 out, code = self._gitdir(['cat-file', '-e', revision])
741 out, code = self._gitdir(['cat-file', '-e', revision])
742 return code == 0
742 return code == 0
743
743
744 def _gitisancestor(self, r1, r2):
744 def _gitisancestor(self, r1, r2):
745 base = self._gitcommand(['merge-base', r1, r2])
745 base = self._gitcommand(['merge-base', r1, r2])
746 return base == r1
746 return base == r1
747
747
748 def _gitbranchmap(self):
748 def _gitbranchmap(self):
749 '''returns 2 things:
749 '''returns 2 things:
750 a map from git branch to revision
750 a map from git branch to revision
751 a map from revision to branches'''
751 a map from revision to branches'''
752 branch2rev = {}
752 branch2rev = {}
753 rev2branch = {}
753 rev2branch = {}
754
754
755 out = self._gitcommand(['for-each-ref', '--format',
755 out = self._gitcommand(['for-each-ref', '--format',
756 '%(objectname) %(refname)'])
756 '%(objectname) %(refname)'])
757 for line in out.split('\n'):
757 for line in out.split('\n'):
758 revision, ref = line.split(' ')
758 revision, ref = line.split(' ')
759 if (not ref.startswith('refs/heads/') and
759 if (not ref.startswith('refs/heads/') and
760 not ref.startswith('refs/remotes/')):
760 not ref.startswith('refs/remotes/')):
761 continue
761 continue
762 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
762 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
763 continue # ignore remote/HEAD redirects
763 continue # ignore remote/HEAD redirects
764 branch2rev[ref] = revision
764 branch2rev[ref] = revision
765 rev2branch.setdefault(revision, []).append(ref)
765 rev2branch.setdefault(revision, []).append(ref)
766 return branch2rev, rev2branch
766 return branch2rev, rev2branch
767
767
768 def _gittracking(self, branches):
768 def _gittracking(self, branches):
769 'return map of remote branch to local tracking branch'
769 'return map of remote branch to local tracking branch'
770 # assumes no more than one local tracking branch for each remote
770 # assumes no more than one local tracking branch for each remote
771 tracking = {}
771 tracking = {}
772 for b in branches:
772 for b in branches:
773 if b.startswith('refs/remotes/'):
773 if b.startswith('refs/remotes/'):
774 continue
774 continue
775 remote = self._gitcommand(['config', 'branch.%s.remote' % b])
775 remote = self._gitcommand(['config', 'branch.%s.remote' % b])
776 if remote:
776 if remote:
777 ref = self._gitcommand(['config', 'branch.%s.merge' % b])
777 ref = self._gitcommand(['config', 'branch.%s.merge' % b])
778 tracking['refs/remotes/%s/%s' %
778 tracking['refs/remotes/%s/%s' %
779 (remote, ref.split('/', 2)[2])] = b
779 (remote, ref.split('/', 2)[2])] = b
780 return tracking
780 return tracking
781
781
782 def _abssource(self, source):
782 def _abssource(self, source):
783 if '://' not in source:
783 if '://' not in source:
784 # recognize the scp syntax as an absolute source
784 # recognize the scp syntax as an absolute source
785 colon = source.find(':')
785 colon = source.find(':')
786 if colon != -1 and '/' not in source[:colon]:
786 if colon != -1 and '/' not in source[:colon]:
787 return source
787 return source
788 self._subsource = source
788 self._subsource = source
789 return _abssource(self)
789 return _abssource(self)
790
790
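# The check above treats scp-style "user@host:path" sources as already
# absolute because they contain a colon before any slash. A standalone
# restatement with made-up sources:
def _is_scp_like(source):
    if '://' in source:
        return False                 # a real URL, resolved elsewhere
    colon = source.find(':')
    return colon != -1 and '/' not in source[:colon]

# _is_scp_like('git@github.com:user/repo.git')  ->  True
# _is_scp_like('../sibling/repo')               ->  False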
791 def _fetch(self, source, revision):
791 def _fetch(self, source, revision):
792 if self._gitmissing():
792 if self._gitmissing():
793 source = self._abssource(source)
793 source = self._abssource(source)
794 self._ui.status(_('cloning subrepo %s from %s\n') %
794 self._ui.status(_('cloning subrepo %s from %s\n') %
795 (self._relpath, source))
795 (self._relpath, source))
796 self._gitnodir(['clone', source, self._abspath])
796 self._gitnodir(['clone', source, self._abspath])
797 if self._githavelocally(revision):
797 if self._githavelocally(revision):
798 return
798 return
799 self._ui.status(_('pulling subrepo %s from %s\n') %
799 self._ui.status(_('pulling subrepo %s from %s\n') %
800 (self._relpath, self._gitremote('origin')))
800 (self._relpath, self._gitremote('origin')))
801 # try only origin: the originally cloned repo
801 # try only origin: the originally cloned repo
802 self._gitcommand(['fetch'])
802 self._gitcommand(['fetch'])
803 if not self._githavelocally(revision):
803 if not self._githavelocally(revision):
804 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
804 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
805 (revision, self._relpath))
805 (revision, self._relpath))
806
806
807 def dirty(self, ignoreupdate=False):
807 def dirty(self, ignoreupdate=False):
808 if self._gitmissing():
808 if self._gitmissing():
809 return True
809 return True
810 if not ignoreupdate and self._state[1] != self._gitstate():
810 if not ignoreupdate and self._state[1] != self._gitstate():
811 # different version checked out
811 # different version checked out
812 return True
812 return True
813 # check for staged changes or modified files; ignore untracked files
813 # check for staged changes or modified files; ignore untracked files
814 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
814 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
815 return code == 1
815 return code == 1
816
816
817 def get(self, state, overwrite=False):
817 def get(self, state, overwrite=False):
818 source, revision, kind = state
818 source, revision, kind = state
819 self._fetch(source, revision)
819 self._fetch(source, revision)
820 # if the repo was set to be bare, unbare it
820 # if the repo was set to be bare, unbare it
821 if self._gitcommand(['config', '--bool', 'core.bare']) == 'true':
821 if self._gitcommand(['config', '--bool', 'core.bare']) == 'true':
822 self._gitcommand(['config', 'core.bare', 'false'])
822 self._gitcommand(['config', 'core.bare', 'false'])
823 if self._gitstate() == revision:
823 if self._gitstate() == revision:
824 self._gitcommand(['reset', '--hard', 'HEAD'])
824 self._gitcommand(['reset', '--hard', 'HEAD'])
825 return
825 return
826 elif self._gitstate() == revision:
826 elif self._gitstate() == revision:
827 if overwrite:
827 if overwrite:
828 # first reset the index to unmark new files for commit, because
828 # first reset the index to unmark new files for commit, because
829 # reset --hard will otherwise throw away files added for commit,
829 # reset --hard will otherwise throw away files added for commit,
830 # not just unmark them.
830 # not just unmark them.
831 self._gitcommand(['reset', 'HEAD'])
831 self._gitcommand(['reset', 'HEAD'])
832 self._gitcommand(['reset', '--hard', 'HEAD'])
832 self._gitcommand(['reset', '--hard', 'HEAD'])
833 return
833 return
834 branch2rev, rev2branch = self._gitbranchmap()
834 branch2rev, rev2branch = self._gitbranchmap()
835
835
836 def checkout(args):
836 def checkout(args):
837 cmd = ['checkout']
837 cmd = ['checkout']
838 if overwrite:
838 if overwrite:
839 # first reset the index to unmark new files for commit, because
839 # first reset the index to unmark new files for commit, because
840 # the -f option will otherwise throw away files added for
840 # the -f option will otherwise throw away files added for
841 # commit, not just unmark them.
841 # commit, not just unmark them.
842 self._gitcommand(['reset', 'HEAD'])
842 self._gitcommand(['reset', 'HEAD'])
843 cmd.append('-f')
843 cmd.append('-f')
844 self._gitcommand(cmd + args)
844 self._gitcommand(cmd + args)
845
845
846 def rawcheckout():
846 def rawcheckout():
847 # no branch to checkout, check it out with no branch
847 # no branch to checkout, check it out with no branch
848 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
848 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
849 self._relpath)
849 self._relpath)
850 self._ui.warn(_('check out a git branch if you intend '
850 self._ui.warn(_('check out a git branch if you intend '
851 'to make changes\n'))
851 'to make changes\n'))
852 checkout(['-q', revision])
852 checkout(['-q', revision])
853
853
854 if revision not in rev2branch:
854 if revision not in rev2branch:
855 rawcheckout()
855 rawcheckout()
856 return
856 return
857 branches = rev2branch[revision]
857 branches = rev2branch[revision]
858 firstlocalbranch = None
858 firstlocalbranch = None
859 for b in branches:
859 for b in branches:
860 if b == 'refs/heads/master':
860 if b == 'refs/heads/master':
861 # master trumps all other branches
861 # master trumps all other branches
862 checkout(['refs/heads/master'])
862 checkout(['refs/heads/master'])
863 return
863 return
864 if not firstlocalbranch and not b.startswith('refs/remotes/'):
864 if not firstlocalbranch and not b.startswith('refs/remotes/'):
865 firstlocalbranch = b
865 firstlocalbranch = b
866 if firstlocalbranch:
866 if firstlocalbranch:
867 checkout([firstlocalbranch])
867 checkout([firstlocalbranch])
868 return
868 return
869
869
870 tracking = self._gittracking(branch2rev.keys())
870 tracking = self._gittracking(branch2rev.keys())
871 # choose a remote branch already tracked if possible
871 # choose a remote branch already tracked if possible
872 remote = branches[0]
872 remote = branches[0]
873 if remote not in tracking:
873 if remote not in tracking:
874 for b in branches:
874 for b in branches:
875 if b in tracking:
875 if b in tracking:
876 remote = b
876 remote = b
877 break
877 break
878
878
879 if remote not in tracking:
879 if remote not in tracking:
880 # create a new local tracking branch
880 # create a new local tracking branch
881 local = remote.split('/', 2)[2]
881 local = remote.split('/', 2)[2]
882 checkout(['-b', local, remote])
882 checkout(['-b', local, remote])
883 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
883 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
884 # When updating to a tracked remote branch,
884 # When updating to a tracked remote branch,
885 # if the local tracking branch is downstream of it,
885 # if the local tracking branch is downstream of it,
886 # a normal `git pull` would have performed a "fast-forward merge"
886 # a normal `git pull` would have performed a "fast-forward merge"
887 # which is equivalent to updating the local branch to the remote.
887 # which is equivalent to updating the local branch to the remote.
888 # Since we are only looking at branching at update, we need to
888 # Since we are only looking at branching at update, we need to
889 # detect this situation and perform this action lazily.
889 # detect this situation and perform this action lazily.
890 if tracking[remote] != self._gitcurrentbranch():
890 if tracking[remote] != self._gitcurrentbranch():
891 checkout([tracking[remote]])
891 checkout([tracking[remote]])
892 self._gitcommand(['merge', '--ff', remote])
892 self._gitcommand(['merge', '--ff', remote])
893 else:
893 else:
894 # a real merge would be required, just checkout the revision
894 # a real merge would be required, just checkout the revision
895 rawcheckout()
895 rawcheckout()
896
896
897 def commit(self, text, user, date):
897 def commit(self, text, user, date):
898 if self._gitmissing():
898 if self._gitmissing():
899 raise util.Abort(_("subrepo %s is missing") % self._relpath)
899 raise util.Abort(_("subrepo %s is missing") % self._relpath)
900 cmd = ['commit', '-a', '-m', text]
900 cmd = ['commit', '-a', '-m', text]
901 env = os.environ.copy()
901 env = os.environ.copy()
902 if user:
902 if user:
903 cmd += ['--author', user]
903 cmd += ['--author', user]
904 if date:
904 if date:
905 # git's date parser silently ignores the date when seconds < 1e9
905 # git's date parser silently ignores the date when seconds < 1e9
906 # convert to ISO8601
906 # convert to ISO8601
907 env['GIT_AUTHOR_DATE'] = util.datestr(date,
907 env['GIT_AUTHOR_DATE'] = util.datestr(date,
908 '%Y-%m-%dT%H:%M:%S %1%2')
908 '%Y-%m-%dT%H:%M:%S %1%2')
909 self._gitcommand(cmd, env=env)
909 self._gitcommand(cmd, env=env)
910 # make sure commit works otherwise HEAD might not exist under certain
910 # make sure commit works otherwise HEAD might not exist under certain
911 # circumstances
911 # circumstances
912 return self._gitstate()
912 return self._gitstate()
913
913
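# The commit above converts Mercurial's (unixtime, offset) date tuple to
# ISO 8601 before handing it to git. A rough standalone equivalent of what
# util.datestr produces with '%Y-%m-%dT%H:%M:%S %1%2' (format behaviour and
# sign convention assumed here: offset is seconds west of UTC):
import datetime

def _iso8601(unixtime, offset):
    local = datetime.datetime.utcfromtimestamp(unixtime - offset)
    sign = '-' if offset > 0 else '+'
    hours, minutes = divmod(abs(offset) // 60, 60)
    return '%s %s%02d%02d' % (local.strftime('%Y-%m-%dT%H:%M:%S'),
                              sign, hours, minutes)

# _iso8601(0, -3600)  ->  '1970-01-01T01:00:00 +0100'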
914 def merge(self, state):
914 def merge(self, state):
915 source, revision, kind = state
915 source, revision, kind = state
916 self._fetch(source, revision)
916 self._fetch(source, revision)
917 base = self._gitcommand(['merge-base', revision, self._state[1]])
917 base = self._gitcommand(['merge-base', revision, self._state[1]])
918 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
918 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
919
919
920 def mergefunc():
920 def mergefunc():
921 if base == revision:
921 if base == revision:
922 self.get(state) # fast forward merge
922 self.get(state) # fast forward merge
923 elif base != self._state[1]:
923 elif base != self._state[1]:
924 self._gitcommand(['merge', '--no-commit', revision])
924 self._gitcommand(['merge', '--no-commit', revision])
925
925
926 if self.dirty():
926 if self.dirty():
927 if self._gitstate() != revision:
927 if self._gitstate() != revision:
928 dirty = self._gitstate() == self._state[1] or code != 0
928 dirty = self._gitstate() == self._state[1] or code != 0
929 if _updateprompt(self._ui, self, dirty,
929 if _updateprompt(self._ui, self, dirty,
930 self._state[1][:7], revision[:7]):
930 self._state[1][:7], revision[:7]):
931 mergefunc()
931 mergefunc()
932 else:
932 else:
933 mergefunc()
933 mergefunc()
934
934
935 def push(self, force):
935 def push(self, force):
936 if self._gitmissing():
936 if self._gitmissing():
937 raise util.Abort(_("subrepo %s is missing") % self._relpath)
937 raise util.Abort(_("subrepo %s is missing") % self._relpath)
938 # if a branch in origin contains the revision, nothing to do
938 # if a branch in origin contains the revision, nothing to do
939 branch2rev, rev2branch = self._gitbranchmap()
939 branch2rev, rev2branch = self._gitbranchmap()
940 if self._state[1] in rev2branch:
940 if self._state[1] in rev2branch:
941 for b in rev2branch[self._state[1]]:
941 for b in rev2branch[self._state[1]]:
942 if b.startswith('refs/remotes/origin/'):
942 if b.startswith('refs/remotes/origin/'):
943 return True
943 return True
944 for b, revision in branch2rev.iteritems():
944 for b, revision in branch2rev.iteritems():
945 if b.startswith('refs/remotes/origin/'):
945 if b.startswith('refs/remotes/origin/'):
946 if self._gitisancestor(self._state[1], revision):
946 if self._gitisancestor(self._state[1], revision):
947 return True
947 return True
948 # otherwise, try to push the currently checked out branch
948 # otherwise, try to push the currently checked out branch
949 cmd = ['push']
949 cmd = ['push']
950 if force:
950 if force:
951 cmd.append('--force')
951 cmd.append('--force')
952
952
953 current = self._gitcurrentbranch()
953 current = self._gitcurrentbranch()
954 if current:
954 if current:
955 # determine if the current branch is even useful
955 # determine if the current branch is even useful
956 if not self._gitisancestor(self._state[1], current):
956 if not self._gitisancestor(self._state[1], current):
957 self._ui.warn(_('unrelated git branch checked out '
957 self._ui.warn(_('unrelated git branch checked out '
958 'in subrepo %s\n') % self._relpath)
958 'in subrepo %s\n') % self._relpath)
959 return False
959 return False
960 self._ui.status(_('pushing branch %s of subrepo %s\n') %
960 self._ui.status(_('pushing branch %s of subrepo %s\n') %
961 (current.split('/', 2)[2], self._relpath))
961 (current.split('/', 2)[2], self._relpath))
962 self._gitcommand(cmd + ['origin', current])
962 self._gitcommand(cmd + ['origin', current])
963 return True
963 return True
964 else:
964 else:
965 self._ui.warn(_('no branch checked out in subrepo %s\n'
965 self._ui.warn(_('no branch checked out in subrepo %s\n'
966 'cannot push revision %s') %
966 'cannot push revision %s') %
967 (self._relpath, self._state[1]))
967 (self._relpath, self._state[1]))
968 return False
968 return False
969
969
970 def remove(self):
970 def remove(self):
971 if self._gitmissing():
971 if self._gitmissing():
972 return
972 return
973 if self.dirty():
973 if self.dirty():
974 self._ui.warn(_('not removing repo %s because '
974 self._ui.warn(_('not removing repo %s because '
975 'it has changes.\n') % self._relpath)
975 'it has changes.\n') % self._relpath)
976 return
976 return
977 # we can't fully delete the repository as it may contain
977 # we can't fully delete the repository as it may contain
978 # local-only history
978 # local-only history
979 self._ui.note(_('removing subrepo %s\n') % self._relpath)
979 self._ui.note(_('removing subrepo %s\n') % self._relpath)
980 self._gitcommand(['config', 'core.bare', 'true'])
980 self._gitcommand(['config', 'core.bare', 'true'])
981 for f in os.listdir(self._abspath):
981 for f in os.listdir(self._abspath):
982 if f == '.git':
982 if f == '.git':
983 continue
983 continue
984 path = os.path.join(self._abspath, f)
984 path = os.path.join(self._abspath, f)
985 if os.path.isdir(path) and not os.path.islink(path):
985 if os.path.isdir(path) and not os.path.islink(path):
986 shutil.rmtree(path)
986 shutil.rmtree(path)
987 else:
987 else:
988 os.remove(path)
988 os.remove(path)
989
989
990 def archive(self, ui, archiver, prefix):
990 def archive(self, ui, archiver, prefix):
991 source, revision = self._state
991 source, revision = self._state
992 self._fetch(source, revision)
992 self._fetch(source, revision)
993
993
994 # Use git's native archive command and parse the tar stream it produces.
994 # Use git's native archive command and parse the tar stream it produces.
995 # This should be much faster than manually traversing the trees
995 # This should be much faster than manually traversing the trees
996 # and objects with many subprocess calls.
996 # and objects with many subprocess calls.
997 tarstream = self._gitcommand(['archive', revision], stream=True)
997 tarstream = self._gitcommand(['archive', revision], stream=True)
998 tar = tarfile.open(fileobj=tarstream, mode='r|')
998 tar = tarfile.open(fileobj=tarstream, mode='r|')
999 relpath = subrelpath(self)
999 relpath = subrelpath(self)
1000 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1000 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1001 for i, info in enumerate(tar):
1001 for i, info in enumerate(tar):
1002 if info.isdir():
1002 if info.isdir():
1003 continue
1003 continue
1004 if info.issym():
1004 if info.issym():
1005 data = info.linkname
1005 data = info.linkname
1006 else:
1006 else:
1007 data = tar.extractfile(info).read()
1007 data = tar.extractfile(info).read()
1008 archiver.addfile(os.path.join(prefix, self._path, info.name),
1008 archiver.addfile(os.path.join(prefix, self._path, info.name),
1009 info.mode, info.issym(), data)
1009 info.mode, info.issym(), data)
1010 ui.progress(_('archiving (%s)') % relpath, i + 1,
1010 ui.progress(_('archiving (%s)') % relpath, i + 1,
1011 unit=_('files'))
1011 unit=_('files'))
1012 ui.progress(_('archiving (%s)') % relpath, None)
1012 ui.progress(_('archiving (%s)') % relpath, None)
1013
1013
1014
1014
1015 def status(self, rev2, **opts):
1015 def status(self, rev2, **opts):
1016 if self._gitmissing():
1016 if self._gitmissing():
1017 # if the repo is missing, return no results
1017 # if the repo is missing, return no results
1018 return [], [], [], [], [], [], []
1018 return [], [], [], [], [], [], []
1019 rev1 = self._state[1]
1019 rev1 = self._state[1]
1020 modified, added, removed = [], [], []
1020 modified, added, removed = [], [], []
1021 if rev2:
1021 if rev2:
1022 command = ['diff-tree', rev1, rev2]
1022 command = ['diff-tree', rev1, rev2]
1023 else:
1023 else:
1024 command = ['diff-index', rev1]
1024 command = ['diff-index', rev1]
1025 out = self._gitcommand(command)
1025 out = self._gitcommand(command)
1026 for line in out.split('\n'):
1026 for line in out.split('\n'):
1027 tab = line.find('\t')
1027 tab = line.find('\t')
1028 if tab == -1:
1028 if tab == -1:
1029 continue
1029 continue
1030 status, f = line[tab - 1], line[tab + 1:]
1030 status, f = line[tab - 1], line[tab + 1:]
1031 if status == 'M':
1031 if status == 'M':
1032 modified.append(f)
1032 modified.append(f)
1033 elif status == 'A':
1033 elif status == 'A':
1034 added.append(f)
1034 added.append(f)
1035 elif status == 'D':
1035 elif status == 'D':
1036 removed.append(f)
1036 removed.append(f)
1037
1037
1038 deleted = unknown = ignored = clean = []
1038 deleted = unknown = ignored = clean = []
1039 return modified, added, removed, deleted, unknown, ignored, clean
1039 return modified, added, removed, deleted, unknown, ignored, clean
1040
1040
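The status() loop above parses git's raw diff output, where each record ends with a single status letter immediately before a tab and the file name; line[tab - 1] is therefore the status and line[tab + 1:] the path. A hedged standalone illustration of the same parsing (the sample lines follow git's documented raw format; paths and hashes are invented):

    sample = (':100644 100644 a3c9f0b... 1d2e3f4... M\tmercurial/ui.py\n'
              ':000000 100644 0000000... 9a8b7c6... A\tmercurial/scmutil.py')

    modified, added, removed = [], [], []
    for line in sample.split('\n'):
        tab = line.find('\t')
        if tab == -1:
            continue
        status, f = line[tab - 1], line[tab + 1:]  # letter just before the tab
        if status == 'M':
            modified.append(f)
        elif status == 'A':
            added.append(f)
        elif status == 'D':
            removed.append(f)

    # modified == ['mercurial/ui.py'], added == ['mercurial/scmutil.py']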
1041 types = {
1041 types = {
1042 'hg': hgsubrepo,
1042 'hg': hgsubrepo,
1043 'svn': svnsubrepo,
1043 'svn': svnsubrepo,
1044 'git': gitsubrepo,
1044 'git': gitsubrepo,
1045 }
1045 }
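The types table above is the dispatch point for subrepositories: the kind declared for a path selects which class manages it. A hedged sketch of how a kind maps to a class (the classes here are stand-ins, and the .hgsub path/URL are invented):

    # Stand-ins for the three classes registered above (illustrative only).
    class hgsubrepo(object): pass
    class svnsubrepo(object): pass
    class gitsubrepo(object): pass

    types = {'hg': hgsubrepo, 'svn': svnsubrepo, 'git': gitsubrepo}

    # In .hgsub a git subrepository is declared with a bracketed kind:
    #     libfoo = [git]git://example.com/libfoo
    # and that kind is what gets looked up in the table:
    assert types['git'] is gitsubrepo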
@@ -1,643 +1,643 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import errno, getpass, os, socket, sys, tempfile, traceback
9 import errno, getpass, os, socket, sys, tempfile, traceback
10 import config, scmutil, util, error, url
10 import config, scmutil, util, error
11
11
12 class ui(object):
12 class ui(object):
13 def __init__(self, src=None):
13 def __init__(self, src=None):
14 self._buffers = []
14 self._buffers = []
15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
16 self._reportuntrusted = True
16 self._reportuntrusted = True
17 self._ocfg = config.config() # overlay
17 self._ocfg = config.config() # overlay
18 self._tcfg = config.config() # trusted
18 self._tcfg = config.config() # trusted
19 self._ucfg = config.config() # untrusted
19 self._ucfg = config.config() # untrusted
20 self._trustusers = set()
20 self._trustusers = set()
21 self._trustgroups = set()
21 self._trustgroups = set()
22
22
23 if src:
23 if src:
24 self._tcfg = src._tcfg.copy()
24 self._tcfg = src._tcfg.copy()
25 self._ucfg = src._ucfg.copy()
25 self._ucfg = src._ucfg.copy()
26 self._ocfg = src._ocfg.copy()
26 self._ocfg = src._ocfg.copy()
27 self._trustusers = src._trustusers.copy()
27 self._trustusers = src._trustusers.copy()
28 self._trustgroups = src._trustgroups.copy()
28 self._trustgroups = src._trustgroups.copy()
29 self.environ = src.environ
29 self.environ = src.environ
30 self.fixconfig()
30 self.fixconfig()
31 else:
31 else:
32 # shared read-only environment
32 # shared read-only environment
33 self.environ = os.environ
33 self.environ = os.environ
34 # we always trust global config files
34 # we always trust global config files
35 for f in scmutil.rcpath():
35 for f in scmutil.rcpath():
36 self.readconfig(f, trust=True)
36 self.readconfig(f, trust=True)
37
37
38 def copy(self):
38 def copy(self):
39 return self.__class__(self)
39 return self.__class__(self)
40
40
41 def _is_trusted(self, fp, f):
41 def _is_trusted(self, fp, f):
42 st = util.fstat(fp)
42 st = util.fstat(fp)
43 if util.isowner(st):
43 if util.isowner(st):
44 return True
44 return True
45
45
46 tusers, tgroups = self._trustusers, self._trustgroups
46 tusers, tgroups = self._trustusers, self._trustgroups
47 if '*' in tusers or '*' in tgroups:
47 if '*' in tusers or '*' in tgroups:
48 return True
48 return True
49
49
50 user = util.username(st.st_uid)
50 user = util.username(st.st_uid)
51 group = util.groupname(st.st_gid)
51 group = util.groupname(st.st_gid)
52 if user in tusers or group in tgroups or user == util.username():
52 if user in tusers or group in tgroups or user == util.username():
53 return True
53 return True
54
54
55 if self._reportuntrusted:
55 if self._reportuntrusted:
56 self.warn(_('Not trusting file %s from untrusted '
56 self.warn(_('Not trusting file %s from untrusted '
57 'user %s, group %s\n') % (f, user, group))
57 'user %s, group %s\n') % (f, user, group))
58 return False
58 return False
59
59
60 def readconfig(self, filename, root=None, trust=False,
60 def readconfig(self, filename, root=None, trust=False,
61 sections=None, remap=None):
61 sections=None, remap=None):
62 try:
62 try:
63 fp = open(filename)
63 fp = open(filename)
64 except IOError:
64 except IOError:
65 if not sections: # ignore unless we were looking for something
65 if not sections: # ignore unless we were looking for something
66 return
66 return
67 raise
67 raise
68
68
69 cfg = config.config()
69 cfg = config.config()
70 trusted = sections or trust or self._is_trusted(fp, filename)
70 trusted = sections or trust or self._is_trusted(fp, filename)
71
71
72 try:
72 try:
73 cfg.read(filename, fp, sections=sections, remap=remap)
73 cfg.read(filename, fp, sections=sections, remap=remap)
74 except error.ConfigError, inst:
74 except error.ConfigError, inst:
75 if trusted:
75 if trusted:
76 raise
76 raise
77 self.warn(_("Ignored: %s\n") % str(inst))
77 self.warn(_("Ignored: %s\n") % str(inst))
78
78
79 if self.plain():
79 if self.plain():
80 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
80 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
81 'logtemplate', 'style',
81 'logtemplate', 'style',
82 'traceback', 'verbose'):
82 'traceback', 'verbose'):
83 if k in cfg['ui']:
83 if k in cfg['ui']:
84 del cfg['ui'][k]
84 del cfg['ui'][k]
85 for k, v in cfg.items('alias'):
85 for k, v in cfg.items('alias'):
86 del cfg['alias'][k]
86 del cfg['alias'][k]
87 for k, v in cfg.items('defaults'):
87 for k, v in cfg.items('defaults'):
88 del cfg['defaults'][k]
88 del cfg['defaults'][k]
89
89
90 if trusted:
90 if trusted:
91 self._tcfg.update(cfg)
91 self._tcfg.update(cfg)
92 self._tcfg.update(self._ocfg)
92 self._tcfg.update(self._ocfg)
93 self._ucfg.update(cfg)
93 self._ucfg.update(cfg)
94 self._ucfg.update(self._ocfg)
94 self._ucfg.update(self._ocfg)
95
95
96 if root is None:
96 if root is None:
97 root = os.path.expanduser('~')
97 root = os.path.expanduser('~')
98 self.fixconfig(root=root)
98 self.fixconfig(root=root)
99
99
100 def fixconfig(self, root=None, section=None):
100 def fixconfig(self, root=None, section=None):
101 if section in (None, 'paths'):
101 if section in (None, 'paths'):
102 # expand vars and ~
102 # expand vars and ~
103 # translate paths relative to root (or home) into absolute paths
103 # translate paths relative to root (or home) into absolute paths
104 root = root or os.getcwd()
104 root = root or os.getcwd()
105 for c in self._tcfg, self._ucfg, self._ocfg:
105 for c in self._tcfg, self._ucfg, self._ocfg:
106 for n, p in c.items('paths'):
106 for n, p in c.items('paths'):
107 if not p:
107 if not p:
108 continue
108 continue
109 if '%%' in p:
109 if '%%' in p:
110 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
110 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
111 % (n, p, self.configsource('paths', n)))
111 % (n, p, self.configsource('paths', n)))
112 p = p.replace('%%', '%')
112 p = p.replace('%%', '%')
113 p = util.expandpath(p)
113 p = util.expandpath(p)
114 if not url.hasscheme(p) and not os.path.isabs(p):
114 if not util.hasscheme(p) and not os.path.isabs(p):
115 p = os.path.normpath(os.path.join(root, p))
115 p = os.path.normpath(os.path.join(root, p))
116 c.set("paths", n, p)
116 c.set("paths", n, p)
117
117
118 if section in (None, 'ui'):
118 if section in (None, 'ui'):
119 # update ui options
119 # update ui options
120 self.debugflag = self.configbool('ui', 'debug')
120 self.debugflag = self.configbool('ui', 'debug')
121 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
121 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
122 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
122 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
123 if self.verbose and self.quiet:
123 if self.verbose and self.quiet:
124 self.quiet = self.verbose = False
124 self.quiet = self.verbose = False
125 self._reportuntrusted = self.debugflag or self.configbool("ui",
125 self._reportuntrusted = self.debugflag or self.configbool("ui",
126 "report_untrusted", True)
126 "report_untrusted", True)
127 self.tracebackflag = self.configbool('ui', 'traceback', False)
127 self.tracebackflag = self.configbool('ui', 'traceback', False)
128
128
129 if section in (None, 'trusted'):
129 if section in (None, 'trusted'):
130 # update trust information
130 # update trust information
131 self._trustusers.update(self.configlist('trusted', 'users'))
131 self._trustusers.update(self.configlist('trusted', 'users'))
132 self._trustgroups.update(self.configlist('trusted', 'groups'))
132 self._trustgroups.update(self.configlist('trusted', 'groups'))
133
133
134 def setconfig(self, section, name, value, overlay=True):
134 def setconfig(self, section, name, value, overlay=True):
135 if overlay:
135 if overlay:
136 self._ocfg.set(section, name, value)
136 self._ocfg.set(section, name, value)
137 self._tcfg.set(section, name, value)
137 self._tcfg.set(section, name, value)
138 self._ucfg.set(section, name, value)
138 self._ucfg.set(section, name, value)
139 self.fixconfig(section=section)
139 self.fixconfig(section=section)
140
140
141 def _data(self, untrusted):
141 def _data(self, untrusted):
142 return untrusted and self._ucfg or self._tcfg
142 return untrusted and self._ucfg or self._tcfg
143
143
144 def configsource(self, section, name, untrusted=False):
144 def configsource(self, section, name, untrusted=False):
145 return self._data(untrusted).source(section, name) or 'none'
145 return self._data(untrusted).source(section, name) or 'none'
146
146
147 def config(self, section, name, default=None, untrusted=False):
147 def config(self, section, name, default=None, untrusted=False):
148 value = self._data(untrusted).get(section, name, default)
148 value = self._data(untrusted).get(section, name, default)
149 if self.debugflag and not untrusted and self._reportuntrusted:
149 if self.debugflag and not untrusted and self._reportuntrusted:
150 uvalue = self._ucfg.get(section, name)
150 uvalue = self._ucfg.get(section, name)
151 if uvalue is not None and uvalue != value:
151 if uvalue is not None and uvalue != value:
152 self.debug(_("ignoring untrusted configuration option "
152 self.debug(_("ignoring untrusted configuration option "
153 "%s.%s = %s\n") % (section, name, uvalue))
153 "%s.%s = %s\n") % (section, name, uvalue))
154 return value
154 return value
155
155
156 def configpath(self, section, name, default=None, untrusted=False):
156 def configpath(self, section, name, default=None, untrusted=False):
157 'get a path config item, expanded relative to config file'
157 'get a path config item, expanded relative to config file'
158 v = self.config(section, name, default, untrusted)
158 v = self.config(section, name, default, untrusted)
159 if not os.path.isabs(v) or "://" not in v:
159 if not os.path.isabs(v) or "://" not in v:
160 src = self.configsource(section, name, untrusted)
160 src = self.configsource(section, name, untrusted)
161 if ':' in src:
161 if ':' in src:
162 base = os.path.dirname(src.rsplit(':', 1)[0])
162 base = os.path.dirname(src.rsplit(':', 1)[0])
163 v = os.path.join(base, os.path.expanduser(v))
163 v = os.path.join(base, os.path.expanduser(v))
164 return v
164 return v
165
165
166 def configbool(self, section, name, default=False, untrusted=False):
166 def configbool(self, section, name, default=False, untrusted=False):
167 v = self.config(section, name, None, untrusted)
167 v = self.config(section, name, None, untrusted)
168 if v is None:
168 if v is None:
169 return default
169 return default
170 if isinstance(v, bool):
170 if isinstance(v, bool):
171 return v
171 return v
172 b = util.parsebool(v)
172 b = util.parsebool(v)
173 if b is None:
173 if b is None:
174 raise error.ConfigError(_("%s.%s not a boolean ('%s')")
174 raise error.ConfigError(_("%s.%s not a boolean ('%s')")
175 % (section, name, v))
175 % (section, name, v))
176 return b
176 return b
177
177
178 def configlist(self, section, name, default=None, untrusted=False):
178 def configlist(self, section, name, default=None, untrusted=False):
179 """Return a list of comma/space separated strings"""
179 """Return a list of comma/space separated strings"""
180
180
181 def _parse_plain(parts, s, offset):
181 def _parse_plain(parts, s, offset):
182 whitespace = False
182 whitespace = False
183 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
183 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
184 whitespace = True
184 whitespace = True
185 offset += 1
185 offset += 1
186 if offset >= len(s):
186 if offset >= len(s):
187 return None, parts, offset
187 return None, parts, offset
188 if whitespace:
188 if whitespace:
189 parts.append('')
189 parts.append('')
190 if s[offset] == '"' and not parts[-1]:
190 if s[offset] == '"' and not parts[-1]:
191 return _parse_quote, parts, offset + 1
191 return _parse_quote, parts, offset + 1
192 elif s[offset] == '"' and parts[-1][-1] == '\\':
192 elif s[offset] == '"' and parts[-1][-1] == '\\':
193 parts[-1] = parts[-1][:-1] + s[offset]
193 parts[-1] = parts[-1][:-1] + s[offset]
194 return _parse_plain, parts, offset + 1
194 return _parse_plain, parts, offset + 1
195 parts[-1] += s[offset]
195 parts[-1] += s[offset]
196 return _parse_plain, parts, offset + 1
196 return _parse_plain, parts, offset + 1
197
197
198 def _parse_quote(parts, s, offset):
198 def _parse_quote(parts, s, offset):
199 if offset < len(s) and s[offset] == '"': # ""
199 if offset < len(s) and s[offset] == '"': # ""
200 parts.append('')
200 parts.append('')
201 offset += 1
201 offset += 1
202 while offset < len(s) and (s[offset].isspace() or
202 while offset < len(s) and (s[offset].isspace() or
203 s[offset] == ','):
203 s[offset] == ','):
204 offset += 1
204 offset += 1
205 return _parse_plain, parts, offset
205 return _parse_plain, parts, offset
206
206
207 while offset < len(s) and s[offset] != '"':
207 while offset < len(s) and s[offset] != '"':
208 if (s[offset] == '\\' and offset + 1 < len(s)
208 if (s[offset] == '\\' and offset + 1 < len(s)
209 and s[offset + 1] == '"'):
209 and s[offset + 1] == '"'):
210 offset += 1
210 offset += 1
211 parts[-1] += '"'
211 parts[-1] += '"'
212 else:
212 else:
213 parts[-1] += s[offset]
213 parts[-1] += s[offset]
214 offset += 1
214 offset += 1
215
215
216 if offset >= len(s):
216 if offset >= len(s):
217 real_parts = _configlist(parts[-1])
217 real_parts = _configlist(parts[-1])
218 if not real_parts:
218 if not real_parts:
219 parts[-1] = '"'
219 parts[-1] = '"'
220 else:
220 else:
221 real_parts[0] = '"' + real_parts[0]
221 real_parts[0] = '"' + real_parts[0]
222 parts = parts[:-1]
222 parts = parts[:-1]
223 parts.extend(real_parts)
223 parts.extend(real_parts)
224 return None, parts, offset
224 return None, parts, offset
225
225
226 offset += 1
226 offset += 1
227 while offset < len(s) and s[offset] in [' ', ',']:
227 while offset < len(s) and s[offset] in [' ', ',']:
228 offset += 1
228 offset += 1
229
229
230 if offset < len(s):
230 if offset < len(s):
231 if offset + 1 == len(s) and s[offset] == '"':
231 if offset + 1 == len(s) and s[offset] == '"':
232 parts[-1] += '"'
232 parts[-1] += '"'
233 offset += 1
233 offset += 1
234 else:
234 else:
235 parts.append('')
235 parts.append('')
236 else:
236 else:
237 return None, parts, offset
237 return None, parts, offset
238
238
239 return _parse_plain, parts, offset
239 return _parse_plain, parts, offset
240
240
241 def _configlist(s):
241 def _configlist(s):
242 s = s.rstrip(' ,')
242 s = s.rstrip(' ,')
243 if not s:
243 if not s:
244 return []
244 return []
245 parser, parts, offset = _parse_plain, [''], 0
245 parser, parts, offset = _parse_plain, [''], 0
246 while parser:
246 while parser:
247 parser, parts, offset = parser(parts, s, offset)
247 parser, parts, offset = parser(parts, s, offset)
248 return parts
248 return parts
249
249
250 result = self.config(section, name, untrusted=untrusted)
250 result = self.config(section, name, untrusted=untrusted)
251 if result is None:
251 if result is None:
252 result = default or []
252 result = default or []
253 if isinstance(result, basestring):
253 if isinstance(result, basestring):
254 result = _configlist(result.lstrip(' ,\n'))
254 result = _configlist(result.lstrip(' ,\n'))
255 if result is None:
255 if result is None:
256 result = default or []
256 result = default or []
257 return result
257 return result
258
258
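The _parse_plain/_parse_quote state machine above lets list-valued settings mix comma and whitespace separators with double-quoted items that may themselves contain either. A hedged usage sketch (the section and key names are invented; the expected results follow from the parser shown above):

    from mercurial import ui as uimod

    u = uimod.ui()
    u.setconfig('test', 'simple', 'foo bar, baz')
    u.setconfig('test', 'quoted', '"foo bar", baz "x, y"')

    u.configlist('test', 'simple')   # -> ['foo', 'bar', 'baz']
    u.configlist('test', 'quoted')   # -> ['foo bar', 'baz', 'x, y']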
259 def has_section(self, section, untrusted=False):
259 def has_section(self, section, untrusted=False):
260 '''tell whether section exists in config.'''
260 '''tell whether section exists in config.'''
261 return section in self._data(untrusted)
261 return section in self._data(untrusted)
262
262
263 def configitems(self, section, untrusted=False):
263 def configitems(self, section, untrusted=False):
264 items = self._data(untrusted).items(section)
264 items = self._data(untrusted).items(section)
265 if self.debugflag and not untrusted and self._reportuntrusted:
265 if self.debugflag and not untrusted and self._reportuntrusted:
266 for k, v in self._ucfg.items(section):
266 for k, v in self._ucfg.items(section):
267 if self._tcfg.get(section, k) != v:
267 if self._tcfg.get(section, k) != v:
268 self.debug(_("ignoring untrusted configuration option "
268 self.debug(_("ignoring untrusted configuration option "
269 "%s.%s = %s\n") % (section, k, v))
269 "%s.%s = %s\n") % (section, k, v))
270 return items
270 return items
271
271
272 def walkconfig(self, untrusted=False):
272 def walkconfig(self, untrusted=False):
273 cfg = self._data(untrusted)
273 cfg = self._data(untrusted)
274 for section in cfg.sections():
274 for section in cfg.sections():
275 for name, value in self.configitems(section, untrusted):
275 for name, value in self.configitems(section, untrusted):
276 yield section, name, value
276 yield section, name, value
277
277
278 def plain(self):
278 def plain(self):
279 '''is plain mode active?
279 '''is plain mode active?
280
280
281 Plain mode means that all configuration variables which affect
281 Plain mode means that all configuration variables which affect
282 the behavior and output of Mercurial should be
282 the behavior and output of Mercurial should be
283 ignored. Additionally, the output should be stable,
283 ignored. Additionally, the output should be stable,
284 reproducible and suitable for use in scripts or applications.
284 reproducible and suitable for use in scripts or applications.
285
285
286 The only way to trigger plain mode is by setting either the
286 The only way to trigger plain mode is by setting either the
287 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
287 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
288
288
289 The return value can either be False, True, or a list of
289 The return value can either be False, True, or a list of
290 features that plain mode should not apply to (e.g., i18n,
290 features that plain mode should not apply to (e.g., i18n,
291 progress, etc).
291 progress, etc).
292 '''
292 '''
293 if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
293 if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
294 return False
294 return False
295 exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
295 exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
296 return exceptions or True
296 return exceptions or True
297
297
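As plain() documents above, plain mode is switched on purely through the environment, and HGPLAINEXCEPT names features that keep their configuration. A hedged illustration (the feature names are just examples):

    import os
    from mercurial import ui as uimod

    u = uimod.ui()

    # neither variable set: plain mode is off
    os.environ.pop('HGPLAIN', None)
    os.environ.pop('HGPLAINEXCEPT', None)
    u.plain()                                 # -> False

    # HGPLAIN set: plain mode is on (a true value is returned)
    os.environ['HGPLAIN'] = '1'
    u.plain()                                 # truthy

    # HGPLAINEXCEPT lists features exempt from plain mode
    os.environ['HGPLAINEXCEPT'] = 'i18n,progress'
    u.plain()                                 # -> ['i18n', 'progress']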
298 def username(self):
298 def username(self):
299 """Return default username to be used in commits.
299 """Return default username to be used in commits.
300
300
301 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
301 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
302 searching stops at the first of these that is set.
302 searching stops at the first of these that is set.
303 If not found and ui.askusername is True, ask the user, else use
303 If not found and ui.askusername is True, ask the user, else use
304 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
304 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
305 """
305 """
306 user = os.environ.get("HGUSER")
306 user = os.environ.get("HGUSER")
307 if user is None:
307 if user is None:
308 user = self.config("ui", "username")
308 user = self.config("ui", "username")
309 if user is not None:
309 if user is not None:
310 user = os.path.expandvars(user)
310 user = os.path.expandvars(user)
311 if user is None:
311 if user is None:
312 user = os.environ.get("EMAIL")
312 user = os.environ.get("EMAIL")
313 if user is None and self.configbool("ui", "askusername"):
313 if user is None and self.configbool("ui", "askusername"):
314 user = self.prompt(_("enter a commit username:"), default=None)
314 user = self.prompt(_("enter a commit username:"), default=None)
315 if user is None and not self.interactive():
315 if user is None and not self.interactive():
316 try:
316 try:
317 user = '%s@%s' % (util.getuser(), socket.getfqdn())
317 user = '%s@%s' % (util.getuser(), socket.getfqdn())
318 self.warn(_("No username found, using '%s' instead\n") % user)
318 self.warn(_("No username found, using '%s' instead\n") % user)
319 except KeyError:
319 except KeyError:
320 pass
320 pass
321 if not user:
321 if not user:
322 raise util.Abort(_('no username supplied (see "hg help config")'))
322 raise util.Abort(_('no username supplied (see "hg help config")'))
323 if "\n" in user:
323 if "\n" in user:
324 raise util.Abort(_("username %s contains a newline\n") % repr(user))
324 raise util.Abort(_("username %s contains a newline\n") % repr(user))
325 return user
325 return user
326
326
327 def shortuser(self, user):
327 def shortuser(self, user):
328 """Return a short representation of a user name or email address."""
328 """Return a short representation of a user name or email address."""
329 if not self.verbose:
329 if not self.verbose:
330 user = util.shortuser(user)
330 user = util.shortuser(user)
331 return user
331 return user
332
332
333 def expandpath(self, loc, default=None):
333 def expandpath(self, loc, default=None):
334 """Return repository location relative to cwd or from [paths]"""
334 """Return repository location relative to cwd or from [paths]"""
335 if url.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
335 if util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
336 return loc
336 return loc
337
337
338 path = self.config('paths', loc)
338 path = self.config('paths', loc)
339 if not path and default is not None:
339 if not path and default is not None:
340 path = self.config('paths', default)
340 path = self.config('paths', default)
341 return path or loc
341 return path or loc
342
342
343 def pushbuffer(self):
343 def pushbuffer(self):
344 self._buffers.append([])
344 self._buffers.append([])
345
345
346 def popbuffer(self, labeled=False):
346 def popbuffer(self, labeled=False):
347 '''pop the last buffer and return the buffered output
347 '''pop the last buffer and return the buffered output
348
348
349 If labeled is True, any labels associated with buffered
349 If labeled is True, any labels associated with buffered
350 output will be handled. By default, this has no effect
350 output will be handled. By default, this has no effect
351 on the output returned, but extensions and GUI tools may
351 on the output returned, but extensions and GUI tools may
352 handle this argument and return styled output. If output
352 handle this argument and return styled output. If output
353 is being buffered so it can be captured and parsed or
353 is being buffered so it can be captured and parsed or
354 processed, labeled should not be set to True.
354 processed, labeled should not be set to True.
355 '''
355 '''
356 return "".join(self._buffers.pop())
356 return "".join(self._buffers.pop())
357
357
358 def write(self, *args, **opts):
358 def write(self, *args, **opts):
359 '''write args to output
359 '''write args to output
360
360
361 By default, this method simply writes to the buffer or stdout,
361 By default, this method simply writes to the buffer or stdout,
362 but extensions or GUI tools may override this method,
362 but extensions or GUI tools may override this method,
363 write_err(), popbuffer(), and label() to style output from
363 write_err(), popbuffer(), and label() to style output from
364 various parts of hg.
364 various parts of hg.
365
365
366 An optional keyword argument, "label", can be passed in.
366 An optional keyword argument, "label", can be passed in.
367 This should be a string containing label names separated by
367 This should be a string containing label names separated by
368 space. Label names take the form of "topic.type". For example,
368 space. Label names take the form of "topic.type". For example,
369 ui.debug() issues a label of "ui.debug".
369 ui.debug() issues a label of "ui.debug".
370
370
371 When labeling output for a specific command, a label of
371 When labeling output for a specific command, a label of
372 "cmdname.type" is recommended. For example, status issues
372 "cmdname.type" is recommended. For example, status issues
373 a label of "status.modified" for modified files.
373 a label of "status.modified" for modified files.
374 '''
374 '''
375 if self._buffers:
375 if self._buffers:
376 self._buffers[-1].extend([str(a) for a in args])
376 self._buffers[-1].extend([str(a) for a in args])
377 else:
377 else:
378 for a in args:
378 for a in args:
379 sys.stdout.write(str(a))
379 sys.stdout.write(str(a))
380
380
381 def write_err(self, *args, **opts):
381 def write_err(self, *args, **opts):
382 try:
382 try:
383 if not getattr(sys.stdout, 'closed', False):
383 if not getattr(sys.stdout, 'closed', False):
384 sys.stdout.flush()
384 sys.stdout.flush()
385 for a in args:
385 for a in args:
386 sys.stderr.write(str(a))
386 sys.stderr.write(str(a))
387 # stderr may be buffered under win32 when redirected to files,
387 # stderr may be buffered under win32 when redirected to files,
388 # including stdout.
388 # including stdout.
389 if not getattr(sys.stderr, 'closed', False):
389 if not getattr(sys.stderr, 'closed', False):
390 sys.stderr.flush()
390 sys.stderr.flush()
391 except IOError, inst:
391 except IOError, inst:
392 if inst.errno not in (errno.EPIPE, errno.EIO):
392 if inst.errno not in (errno.EPIPE, errno.EIO):
393 raise
393 raise
394
394
395 def flush(self):
395 def flush(self):
396 try: sys.stdout.flush()
396 try: sys.stdout.flush()
397 except: pass
397 except: pass
398 try: sys.stderr.flush()
398 try: sys.stderr.flush()
399 except: pass
399 except: pass
400
400
401 def interactive(self):
401 def interactive(self):
402 '''is interactive input allowed?
402 '''is interactive input allowed?
403
403
404 An interactive session is a session where input can be reasonably read
404 An interactive session is a session where input can be reasonably read
405 from `sys.stdin'. If this function returns false, any attempt to read
405 from `sys.stdin'. If this function returns false, any attempt to read
406 from stdin should fail with an error, unless a sensible default has been
406 from stdin should fail with an error, unless a sensible default has been
407 specified.
407 specified.
408
408
409 Interactiveness is triggered by the value of the `ui.interactive'
409 Interactiveness is triggered by the value of the `ui.interactive'
410 configuration variable or - if it is unset - when `sys.stdin' points
410 configuration variable or - if it is unset - when `sys.stdin' points
411 to a terminal device.
411 to a terminal device.
412
412
413 This function refers to input only; for output, see `ui.formatted()'.
413 This function refers to input only; for output, see `ui.formatted()'.
414 '''
414 '''
415 i = self.configbool("ui", "interactive", None)
415 i = self.configbool("ui", "interactive", None)
416 if i is None:
416 if i is None:
417 try:
417 try:
418 return sys.stdin.isatty()
418 return sys.stdin.isatty()
419 except AttributeError:
419 except AttributeError:
420 # some environments replace stdin without implementing isatty
420 # some environments replace stdin without implementing isatty
421 # usually those are non-interactive
421 # usually those are non-interactive
422 return False
422 return False
423
423
424 return i
424 return i
425
425
426 def termwidth(self):
426 def termwidth(self):
427 '''how wide is the terminal in columns?
427 '''how wide is the terminal in columns?
428 '''
428 '''
429 if 'COLUMNS' in os.environ:
429 if 'COLUMNS' in os.environ:
430 try:
430 try:
431 return int(os.environ['COLUMNS'])
431 return int(os.environ['COLUMNS'])
432 except ValueError:
432 except ValueError:
433 pass
433 pass
434 return util.termwidth()
434 return util.termwidth()
435
435
436 def formatted(self):
436 def formatted(self):
437 '''should formatted output be used?
437 '''should formatted output be used?
438
438
439 It is often desirable to format the output to suite the output medium.
439 It is often desirable to format the output to suite the output medium.
440 Examples of this are truncating long lines or colorizing messages.
440 Examples of this are truncating long lines or colorizing messages.
441 However, this is not often not desirable when piping output into other
441 However, this is not often not desirable when piping output into other
442 utilities, e.g. `grep'.
442 utilities, e.g. `grep'.
443
443
444 Formatted output is triggered by the value of the `ui.formatted'
444 Formatted output is triggered by the value of the `ui.formatted'
445 configuration variable or - if it is unset - when `sys.stdout' points
445 configuration variable or - if it is unset - when `sys.stdout' points
446 to a terminal device. Please note that `ui.formatted' should be
446 to a terminal device. Please note that `ui.formatted' should be
447 considered an implementation detail; it is not intended for use outside
447 considered an implementation detail; it is not intended for use outside
448 Mercurial or its extensions.
448 Mercurial or its extensions.
449
449
450 This function refers to output only; for input, see `ui.interactive()'.
450 This function refers to output only; for input, see `ui.interactive()'.
451 This function always returns false when in plain mode, see `ui.plain()'.
451 This function always returns false when in plain mode, see `ui.plain()'.
452 '''
452 '''
453 if self.plain():
453 if self.plain():
454 return False
454 return False
455
455
456 i = self.configbool("ui", "formatted", None)
456 i = self.configbool("ui", "formatted", None)
457 if i is None:
457 if i is None:
458 try:
458 try:
459 return sys.stdout.isatty()
459 return sys.stdout.isatty()
460 except AttributeError:
460 except AttributeError:
461 # some environments replace stdout without implementing isatty
461 # some environments replace stdout without implementing isatty
462 # usually those are non-interactive
462 # usually those are non-interactive
463 return False
463 return False
464
464
465 return i
465 return i
466
466
467 def _readline(self, prompt=''):
467 def _readline(self, prompt=''):
468 if sys.stdin.isatty():
468 if sys.stdin.isatty():
469 try:
469 try:
470 # magically add command line editing support, where
470 # magically add command line editing support, where
471 # available
471 # available
472 import readline
472 import readline
473 # force demandimport to really load the module
473 # force demandimport to really load the module
474 readline.read_history_file
474 readline.read_history_file
475 # windows sometimes raises something other than ImportError
475 # windows sometimes raises something other than ImportError
476 except Exception:
476 except Exception:
477 pass
477 pass
478 line = raw_input(prompt)
478 line = raw_input(prompt)
479 # When stdin is in binary mode on Windows, it can cause
479 # When stdin is in binary mode on Windows, it can cause
480 # raw_input() to emit an extra trailing carriage return
480 # raw_input() to emit an extra trailing carriage return
481 if os.linesep == '\r\n' and line and line[-1] == '\r':
481 if os.linesep == '\r\n' and line and line[-1] == '\r':
482 line = line[:-1]
482 line = line[:-1]
483 return line
483 return line
484
484
485 def prompt(self, msg, default="y"):
485 def prompt(self, msg, default="y"):
486 """Prompt user with msg, read response.
486 """Prompt user with msg, read response.
487 If ui is not interactive, the default is returned.
487 If ui is not interactive, the default is returned.
488 """
488 """
489 if not self.interactive():
489 if not self.interactive():
490 self.write(msg, ' ', default, "\n")
490 self.write(msg, ' ', default, "\n")
491 return default
491 return default
492 try:
492 try:
493 r = self._readline(self.label(msg, 'ui.prompt') + ' ')
493 r = self._readline(self.label(msg, 'ui.prompt') + ' ')
494 if not r:
494 if not r:
495 return default
495 return default
496 return r
496 return r
497 except EOFError:
497 except EOFError:
498 raise util.Abort(_('response expected'))
498 raise util.Abort(_('response expected'))
499
499
500 def promptchoice(self, msg, choices, default=0):
500 def promptchoice(self, msg, choices, default=0):
501 """Prompt user with msg, read response, and ensure it matches
501 """Prompt user with msg, read response, and ensure it matches
502 one of the provided choices. The index of the choice is returned.
502 one of the provided choices. The index of the choice is returned.
503 choices is a sequence of acceptable responses with the format:
503 choices is a sequence of acceptable responses with the format:
504 ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
504 ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
505 If ui is not interactive, the default is returned.
505 If ui is not interactive, the default is returned.
506 """
506 """
507 resps = [s[s.index('&')+1].lower() for s in choices]
507 resps = [s[s.index('&')+1].lower() for s in choices]
508 while True:
508 while True:
509 r = self.prompt(msg, resps[default])
509 r = self.prompt(msg, resps[default])
510 if r.lower() in resps:
510 if r.lower() in resps:
511 return resps.index(r.lower())
511 return resps.index(r.lower())
512 self.write(_("unrecognized response\n"))
512 self.write(_("unrecognized response\n"))
513
513
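Each choice passed to promptchoice() above embeds its accelerator after an '&', so both the accepted one-letter answers and the returned index fall out of the same strings. A short sketch using the docstring's own example:

    choices = ('&None', 'E&xec', 'Sym&link')

    # the accepted responses are the characters following each '&'
    resps = [s[s.index('&') + 1].lower() for s in choices]
    # resps == ['n', 'x', 'l']

    # a case-insensitive answer maps back to the index of its choice
    answer = 'X'
    resps.index(answer.lower())   # -> 1, i.e. 'E&xec'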
514 def getpass(self, prompt=None, default=None):
514 def getpass(self, prompt=None, default=None):
515 if not self.interactive():
515 if not self.interactive():
516 return default
516 return default
517 try:
517 try:
518 return getpass.getpass(prompt or _('password: '))
518 return getpass.getpass(prompt or _('password: '))
519 except EOFError:
519 except EOFError:
520 raise util.Abort(_('response expected'))
520 raise util.Abort(_('response expected'))
521 def status(self, *msg, **opts):
521 def status(self, *msg, **opts):
522 '''write status message to output (if ui.quiet is False)
522 '''write status message to output (if ui.quiet is False)
523
523
524 This adds an output label of "ui.status".
524 This adds an output label of "ui.status".
525 '''
525 '''
526 if not self.quiet:
526 if not self.quiet:
527 opts['label'] = opts.get('label', '') + ' ui.status'
527 opts['label'] = opts.get('label', '') + ' ui.status'
528 self.write(*msg, **opts)
528 self.write(*msg, **opts)
529 def warn(self, *msg, **opts):
529 def warn(self, *msg, **opts):
530 '''write warning message to output (stderr)
530 '''write warning message to output (stderr)
531
531
532 This adds an output label of "ui.warning".
532 This adds an output label of "ui.warning".
533 '''
533 '''
534 opts['label'] = opts.get('label', '') + ' ui.warning'
534 opts['label'] = opts.get('label', '') + ' ui.warning'
535 self.write_err(*msg, **opts)
535 self.write_err(*msg, **opts)
536 def note(self, *msg, **opts):
536 def note(self, *msg, **opts):
537 '''write note to output (if ui.verbose is True)
537 '''write note to output (if ui.verbose is True)
538
538
539 This adds an output label of "ui.note".
539 This adds an output label of "ui.note".
540 '''
540 '''
541 if self.verbose:
541 if self.verbose:
542 opts['label'] = opts.get('label', '') + ' ui.note'
542 opts['label'] = opts.get('label', '') + ' ui.note'
543 self.write(*msg, **opts)
543 self.write(*msg, **opts)
544 def debug(self, *msg, **opts):
544 def debug(self, *msg, **opts):
545 '''write debug message to output (if ui.debugflag is True)
545 '''write debug message to output (if ui.debugflag is True)
546
546
547 This adds an output label of "ui.debug".
547 This adds an output label of "ui.debug".
548 '''
548 '''
549 if self.debugflag:
549 if self.debugflag:
550 opts['label'] = opts.get('label', '') + ' ui.debug'
550 opts['label'] = opts.get('label', '') + ' ui.debug'
551 self.write(*msg, **opts)
551 self.write(*msg, **opts)
552 def edit(self, text, user):
552 def edit(self, text, user):
553 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
553 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
554 text=True)
554 text=True)
555 try:
555 try:
556 f = os.fdopen(fd, "w")
556 f = os.fdopen(fd, "w")
557 f.write(text)
557 f.write(text)
558 f.close()
558 f.close()
559
559
560 editor = self.geteditor()
560 editor = self.geteditor()
561
561
562 util.system("%s \"%s\"" % (editor, name),
562 util.system("%s \"%s\"" % (editor, name),
563 environ={'HGUSER': user},
563 environ={'HGUSER': user},
564 onerr=util.Abort, errprefix=_("edit failed"))
564 onerr=util.Abort, errprefix=_("edit failed"))
565
565
566 f = open(name)
566 f = open(name)
567 t = f.read()
567 t = f.read()
568 f.close()
568 f.close()
569 finally:
569 finally:
570 os.unlink(name)
570 os.unlink(name)
571
571
572 return t
572 return t
573
573
574 def traceback(self, exc=None):
574 def traceback(self, exc=None):
575 '''print exception traceback if traceback printing enabled.
575 '''print exception traceback if traceback printing enabled.
576 only to call in exception handler. returns true if traceback
576 only to call in exception handler. returns true if traceback
577 printed.'''
577 printed.'''
578 if self.tracebackflag:
578 if self.tracebackflag:
579 if exc:
579 if exc:
580 traceback.print_exception(exc[0], exc[1], exc[2])
580 traceback.print_exception(exc[0], exc[1], exc[2])
581 else:
581 else:
582 traceback.print_exc()
582 traceback.print_exc()
583 return self.tracebackflag
583 return self.tracebackflag
584
584
585 def geteditor(self):
585 def geteditor(self):
586 '''return editor to use'''
586 '''return editor to use'''
587 return (os.environ.get("HGEDITOR") or
587 return (os.environ.get("HGEDITOR") or
588 self.config("ui", "editor") or
588 self.config("ui", "editor") or
589 os.environ.get("VISUAL") or
589 os.environ.get("VISUAL") or
590 os.environ.get("EDITOR", "vi"))
590 os.environ.get("EDITOR", "vi"))
591
591
592 def progress(self, topic, pos, item="", unit="", total=None):
592 def progress(self, topic, pos, item="", unit="", total=None):
593 '''show a progress message
593 '''show a progress message
594
594
595 With stock hg, this is simply a debug message that is hidden
595 With stock hg, this is simply a debug message that is hidden
596 by default, but with extensions or GUI tools it may be
596 by default, but with extensions or GUI tools it may be
597 visible. 'topic' is the current operation, 'item' is a
597 visible. 'topic' is the current operation, 'item' is a
598 non-numeric marker of the current position (ie the currently
598 non-numeric marker of the current position (ie the currently
599 in-process file), 'pos' is the current numeric position (ie
599 in-process file), 'pos' is the current numeric position (ie
600 revision, bytes, etc.), unit is a corresponding unit label,
600 revision, bytes, etc.), unit is a corresponding unit label,
601 and total is the highest expected pos.
601 and total is the highest expected pos.
602
602
603 Multiple nested topics may be active at a time.
603 Multiple nested topics may be active at a time.
604
604
605 All topics should be marked closed by setting pos to None at
605 All topics should be marked closed by setting pos to None at
606 termination.
606 termination.
607 '''
607 '''
608
608
609 if pos is None or not self.debugflag:
609 if pos is None or not self.debugflag:
610 return
610 return
611
611
612 if unit:
612 if unit:
613 unit = ' ' + unit
613 unit = ' ' + unit
614 if item:
614 if item:
615 item = ' ' + item
615 item = ' ' + item
616
616
617 if total:
617 if total:
618 pct = 100.0 * pos / total
618 pct = 100.0 * pos / total
619 self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
619 self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
620 % (topic, item, pos, total, unit, pct))
620 % (topic, item, pos, total, unit, pct))
621 else:
621 else:
622 self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
622 self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
623
623
624 def log(self, service, message):
624 def log(self, service, message):
625 '''hook for logging facility extensions
625 '''hook for logging facility extensions
626
626
627 service should be a readily-identifiable subsystem, which will
627 service should be a readily-identifiable subsystem, which will
628 allow filtering.
628 allow filtering.
629 message should be a newline-terminated string to log.
629 message should be a newline-terminated string to log.
630 '''
630 '''
631 pass
631 pass
632
632
633 def label(self, msg, label):
633 def label(self, msg, label):
634 '''style msg based on supplied label
634 '''style msg based on supplied label
635
635
636 Like ui.write(), this just returns msg unchanged, but extensions
636 Like ui.write(), this just returns msg unchanged, but extensions
637 and GUI tools can override it to allow styling output without
637 and GUI tools can override it to allow styling output without
638 writing it.
638 writing it.
639
639
640 ui.write(s, 'label') is equivalent to
640 ui.write(s, 'label') is equivalent to
641 ui.write(ui.label(s, 'label')).
641 ui.write(ui.label(s, 'label')).
642 '''
642 '''
643 return msg
643 return msg
@@ -1,887 +1,625 b''
1 # url.py - HTTP handling for mercurial
1 # url.py - HTTP handling for mercurial
2 #
2 #
3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 import urllib, urllib2, httplib, os, socket, cStringIO, re
10 import urllib, urllib2, httplib, os, socket, cStringIO
11 import __builtin__
11 import __builtin__
12 from i18n import _
12 from i18n import _
13 import keepalive, util
13 import keepalive, util
14
14
15 class url(object):
16 """Reliable URL parser.
17
18 This parses URLs and provides attributes for the following
19 components:
20
21 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
22
23 Missing components are set to None. The only exception is
24 fragment, which is set to '' if present but empty.
25
26 If parsefragment is False, fragment is included in query. If
27 parsequery is False, query is included in path. If both are
28 False, both fragment and query are included in path.
29
30 See http://www.ietf.org/rfc/rfc2396.txt for more information.
31
32 Note that for backward compatibility reasons, bundle URLs do not
33 take host names. That means 'bundle://../' has a path of '../'.
34
35 Examples:
36
37 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
38 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
39 >>> url('ssh://[::1]:2200//home/joe/repo')
40 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
41 >>> url('file:///home/joe/repo')
42 <url scheme: 'file', path: '/home/joe/repo'>
43 >>> url('bundle:foo')
44 <url scheme: 'bundle', path: 'foo'>
45 >>> url('bundle://../foo')
46 <url scheme: 'bundle', path: '../foo'>
47 >>> url('c:\\\\foo\\\\bar')
48 <url path: 'c:\\\\foo\\\\bar'>
49
50 Authentication credentials:
51
52 >>> url('ssh://joe:xyz@x/repo')
53 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
54 >>> url('ssh://joe@x/repo')
55 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
56
57 Query strings and fragments:
58
59 >>> url('http://host/a?b#c')
60 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
61 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
62 <url scheme: 'http', host: 'host', path: 'a?b#c'>
63 """
64
65 _safechars = "!~*'()+"
66 _safepchars = "/!~*'()+"
67 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
68
69 def __init__(self, path, parsequery=True, parsefragment=True):
70 # We slowly chomp away at path until we have only the path left
71 self.scheme = self.user = self.passwd = self.host = None
72 self.port = self.path = self.query = self.fragment = None
73 self._localpath = True
74 self._hostport = ''
75 self._origpath = path
76
77 # special case for Windows drive letters
78 if hasdriveletter(path):
79 self.path = path
80 return
81
82 # For compatibility reasons, we can't handle bundle paths as
83 # normal URLS
84 if path.startswith('bundle:'):
85 self.scheme = 'bundle'
86 path = path[7:]
87 if path.startswith('//'):
88 path = path[2:]
89 self.path = path
90 return
91
92 if self._matchscheme(path):
93 parts = path.split(':', 1)
94 if parts[0]:
95 self.scheme, path = parts
96 self._localpath = False
97
98 if not path:
99 path = None
100 if self._localpath:
101 self.path = ''
102 return
103 else:
104 if parsefragment and '#' in path:
105 path, self.fragment = path.split('#', 1)
106 if not path:
107 path = None
108 if self._localpath:
109 self.path = path
110 return
111
112 if parsequery and '?' in path:
113 path, self.query = path.split('?', 1)
114 if not path:
115 path = None
116 if not self.query:
117 self.query = None
118
119 # // is required to specify a host/authority
120 if path and path.startswith('//'):
121 parts = path[2:].split('/', 1)
122 if len(parts) > 1:
123 self.host, path = parts
124 path = path
125 else:
126 self.host = parts[0]
127 path = None
128 if not self.host:
129 self.host = None
130 if path:
131 path = '/' + path
132
133 if self.host and '@' in self.host:
134 self.user, self.host = self.host.rsplit('@', 1)
135 if ':' in self.user:
136 self.user, self.passwd = self.user.split(':', 1)
137 if not self.host:
138 self.host = None
139
140 # Don't split on colons in IPv6 addresses without ports
141 if (self.host and ':' in self.host and
142 not (self.host.startswith('[') and self.host.endswith(']'))):
143 self._hostport = self.host
144 self.host, self.port = self.host.rsplit(':', 1)
145 if not self.host:
146 self.host = None
147
148 if (self.host and self.scheme == 'file' and
149 self.host not in ('localhost', '127.0.0.1', '[::1]')):
150 raise util.Abort(_('file:// URLs can only refer to localhost'))
151
152 self.path = path
153
154 for a in ('user', 'passwd', 'host', 'port',
155 'path', 'query', 'fragment'):
156 v = getattr(self, a)
157 if v is not None:
158 setattr(self, a, urllib.unquote(v))
159
160 def __repr__(self):
161 attrs = []
162 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
163 'query', 'fragment'):
164 v = getattr(self, a)
165 if v is not None:
166 attrs.append('%s: %r' % (a, v))
167 return '<url %s>' % ', '.join(attrs)
168
169 def __str__(self):
170 """Join the URL's components back into a URL string.
171
172 Examples:
173
174 >>> str(url('http://user:pw@host:80/?foo#bar'))
175 'http://user:pw@host:80/?foo#bar'
176 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
177 'ssh://user:pw@[::1]:2200//home/joe#'
178 >>> str(url('http://localhost:80//'))
179 'http://localhost:80//'
180 >>> str(url('http://localhost:80/'))
181 'http://localhost:80/'
182 >>> str(url('http://localhost:80'))
183 'http://localhost:80/'
184 >>> str(url('bundle:foo'))
185 'bundle:foo'
186 >>> str(url('bundle://../foo'))
187 'bundle:../foo'
188 >>> str(url('path'))
189 'path'
190 """
191 if self._localpath:
192 s = self.path
193 if self.scheme == 'bundle':
194 s = 'bundle:' + s
195 if self.fragment:
196 s += '#' + self.fragment
197 return s
198
199 s = self.scheme + ':'
200 if (self.user or self.passwd or self.host or
201 self.scheme and not self.path):
202 s += '//'
203 if self.user:
204 s += urllib.quote(self.user, safe=self._safechars)
205 if self.passwd:
206 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
207 if self.user or self.passwd:
208 s += '@'
209 if self.host:
210 if not (self.host.startswith('[') and self.host.endswith(']')):
211 s += urllib.quote(self.host)
212 else:
213 s += self.host
214 if self.port:
215 s += ':' + urllib.quote(self.port)
216 if self.host:
217 s += '/'
218 if self.path:
219 s += urllib.quote(self.path, safe=self._safepchars)
220 if self.query:
221 s += '?' + urllib.quote(self.query, safe=self._safepchars)
222 if self.fragment is not None:
223 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
224 return s
225
226 def authinfo(self):
227 user, passwd = self.user, self.passwd
228 try:
229 self.user, self.passwd = None, None
230 s = str(self)
231 finally:
232 self.user, self.passwd = user, passwd
233 if not self.user:
234 return (s, None)
235 return (s, (None, (str(self), self.host),
236 self.user, self.passwd or ''))
237
238 def localpath(self):
239 if self.scheme == 'file' or self.scheme == 'bundle':
240 path = self.path or '/'
241 # For Windows, we need to promote hosts containing drive
242 # letters to paths with drive letters.
243 if hasdriveletter(self._hostport):
244 path = self._hostport + '/' + self.path
245 elif self.host is not None and self.path:
246 path = '/' + path
247 # We also need to handle the case of file:///C:/, which
248 # should return C:/, not /C:/.
249 elif hasdriveletter(path):
250 # Strip leading slash from paths with drive names
251 return path[1:]
252 return path
253 return self._origpath
254
255 def hasscheme(path):
256 return bool(url(path).scheme)
257
258 def hasdriveletter(path):
259 return path[1:2] == ':' and path[0:1].isalpha()
260
261 def localpath(path):
262 return url(path, parsequery=False, parsefragment=False).localpath()
263
264 def hidepassword(u):
265 '''hide user credential in a url string'''
266 u = url(u)
267 if u.passwd:
268 u.passwd = '***'
269 return str(u)
270
271 def removeauth(u):
272 '''remove all authentication information from a url string'''
273 u = url(u)
274 u.user = u.passwd = None
275 return str(u)
276
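hidepassword() and removeauth() above round-trip a string through the url class, touch only the credential fields, and let __str__ re-quote the rest. A hedged sketch of the expected behaviour against this module as it stands before this changeset (afterwards the same helpers live in util); the host and credentials are invented:

    from mercurial import url as urlmod

    urlmod.hidepassword('http://joe:secret@example.com/repo')
    # -> 'http://joe:***@example.com/repo'

    urlmod.removeauth('http://joe:secret@example.com/repo')
    # -> 'http://example.com/repo'

    urlmod.hasscheme('ssh://host/repo')     # -> True
    urlmod.hasscheme('../relative/path')    # -> False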
277 def readauthforuri(ui, uri):
15 def readauthforuri(ui, uri):
278 # Read configuration
16 # Read configuration
279 config = dict()
17 config = dict()
280 for key, val in ui.configitems('auth'):
18 for key, val in ui.configitems('auth'):
281 if '.' not in key:
19 if '.' not in key:
282 ui.warn(_("ignoring invalid [auth] key '%s'\n") % key)
20 ui.warn(_("ignoring invalid [auth] key '%s'\n") % key)
283 continue
21 continue
284 group, setting = key.rsplit('.', 1)
22 group, setting = key.rsplit('.', 1)
285 gdict = config.setdefault(group, dict())
23 gdict = config.setdefault(group, dict())
286 if setting in ('username', 'cert', 'key'):
24 if setting in ('username', 'cert', 'key'):
287 val = util.expandpath(val)
25 val = util.expandpath(val)
288 gdict[setting] = val
26 gdict[setting] = val
289
27
290 # Find the best match
28 # Find the best match
291 scheme, hostpath = uri.split('://', 1)
29 scheme, hostpath = uri.split('://', 1)
292 bestlen = 0
30 bestlen = 0
293 bestauth = None
31 bestauth = None
294 for group, auth in config.iteritems():
32 for group, auth in config.iteritems():
295 prefix = auth.get('prefix')
33 prefix = auth.get('prefix')
296 if not prefix:
34 if not prefix:
297 continue
35 continue
298 p = prefix.split('://', 1)
36 p = prefix.split('://', 1)
299 if len(p) > 1:
37 if len(p) > 1:
300 schemes, prefix = [p[0]], p[1]
38 schemes, prefix = [p[0]], p[1]
301 else:
39 else:
302 schemes = (auth.get('schemes') or 'https').split()
40 schemes = (auth.get('schemes') or 'https').split()
303 if (prefix == '*' or hostpath.startswith(prefix)) and \
41 if (prefix == '*' or hostpath.startswith(prefix)) and \
304 len(prefix) > bestlen and scheme in schemes:
42 len(prefix) > bestlen and scheme in schemes:
305 bestlen = len(prefix)
43 bestlen = len(prefix)
306 bestauth = group, auth
44 bestauth = group, auth
307 return bestauth
45 return bestauth
308
46
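A standalone sketch of the longest-prefix match performed by readauthforuri(), with a plain dict standing in for the parsed [auth] section (group names and URLs are made up):

    config = {
        'broad':  {'prefix': 'example.com', 'username': 'joe'},
        'narrow': {'prefix': 'example.com/private', 'username': 'admin',
                   'schemes': 'http https'},
    }

    def bestmatch(uri, config):
        scheme, hostpath = uri.split('://', 1)
        bestlen, bestauth = 0, None
        for group, auth in config.items():
            prefix = auth.get('prefix')
            if not prefix:
                continue
            p = prefix.split('://', 1)
            if len(p) > 1:
                schemes, prefix = [p[0]], p[1]
            else:
                schemes = (auth.get('schemes') or 'https').split()
            if ((prefix == '*' or hostpath.startswith(prefix))
                    and len(prefix) > bestlen and scheme in schemes):
                bestlen, bestauth = len(prefix), (group, auth)
        return bestauth

    bestmatch('https://example.com/private/repo', config)
    # -> ('narrow', {...}): the longer matching prefix wins over 'broad'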
309 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
47 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
310 def __init__(self, ui):
48 def __init__(self, ui):
311 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
49 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
312 self.ui = ui
50 self.ui = ui
313
51
314 def find_user_password(self, realm, authuri):
52 def find_user_password(self, realm, authuri):
315 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
53 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
316 self, realm, authuri)
54 self, realm, authuri)
317 user, passwd = authinfo
55 user, passwd = authinfo
318 if user and passwd:
56 if user and passwd:
319 self._writedebug(user, passwd)
57 self._writedebug(user, passwd)
320 return (user, passwd)
58 return (user, passwd)
321
59
322 if not user:
60 if not user:
323 res = readauthforuri(self.ui, authuri)
61 res = readauthforuri(self.ui, authuri)
324 if res:
62 if res:
325 group, auth = res
63 group, auth = res
326 user, passwd = auth.get('username'), auth.get('password')
64 user, passwd = auth.get('username'), auth.get('password')
327 self.ui.debug("using auth.%s.* for authentication\n" % group)
65 self.ui.debug("using auth.%s.* for authentication\n" % group)
328 if not user or not passwd:
66 if not user or not passwd:
329 if not self.ui.interactive():
67 if not self.ui.interactive():
330 raise util.Abort(_('http authorization required'))
68 raise util.Abort(_('http authorization required'))
331
69
332 self.ui.write(_("http authorization required\n"))
70 self.ui.write(_("http authorization required\n"))
333 self.ui.write(_("realm: %s\n") % realm)
71 self.ui.write(_("realm: %s\n") % realm)
334 if user:
72 if user:
335 self.ui.write(_("user: %s\n") % user)
73 self.ui.write(_("user: %s\n") % user)
336 else:
74 else:
337 user = self.ui.prompt(_("user:"), default=None)
75 user = self.ui.prompt(_("user:"), default=None)
338
76
339 if not passwd:
77 if not passwd:
340 passwd = self.ui.getpass()
78 passwd = self.ui.getpass()
341
79
342 self.add_password(realm, authuri, user, passwd)
80 self.add_password(realm, authuri, user, passwd)
343 self._writedebug(user, passwd)
81 self._writedebug(user, passwd)
344 return (user, passwd)
82 return (user, passwd)
345
83
346 def _writedebug(self, user, passwd):
84 def _writedebug(self, user, passwd):
347 msg = _('http auth: user %s, password %s\n')
85 msg = _('http auth: user %s, password %s\n')
348 self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))
86 self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))
349
87
350 class proxyhandler(urllib2.ProxyHandler):
88 class proxyhandler(urllib2.ProxyHandler):
351 def __init__(self, ui):
89 def __init__(self, ui):
352 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
90 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
353 # XXX proxyauthinfo = None
91 # XXX proxyauthinfo = None
354
92
355 if proxyurl:
93 if proxyurl:
356 # proxy can be proper url or host[:port]
94 # proxy can be proper url or host[:port]
357 if not (proxyurl.startswith('http:') or
95 if not (proxyurl.startswith('http:') or
358 proxyurl.startswith('https:')):
96 proxyurl.startswith('https:')):
359 proxyurl = 'http://' + proxyurl + '/'
97 proxyurl = 'http://' + proxyurl + '/'
360 proxy = url(proxyurl)
98 proxy = util.url(proxyurl)
361 if not proxy.user:
99 if not proxy.user:
362 proxy.user = ui.config("http_proxy", "user")
100 proxy.user = ui.config("http_proxy", "user")
363 proxy.passwd = ui.config("http_proxy", "passwd")
101 proxy.passwd = ui.config("http_proxy", "passwd")
364
102
365 # see if we should use a proxy for this url
103 # see if we should use a proxy for this url
366 no_list = ["localhost", "127.0.0.1"]
104 no_list = ["localhost", "127.0.0.1"]
367 no_list.extend([p.lower() for
105 no_list.extend([p.lower() for
368 p in ui.configlist("http_proxy", "no")])
106 p in ui.configlist("http_proxy", "no")])
369 no_list.extend([p.strip().lower() for
107 no_list.extend([p.strip().lower() for
370 p in os.getenv("no_proxy", '').split(',')
108 p in os.getenv("no_proxy", '').split(',')
371 if p.strip()])
109 if p.strip()])
372 # "http_proxy.always" config is for running tests on localhost
110 # "http_proxy.always" config is for running tests on localhost
373 if ui.configbool("http_proxy", "always"):
111 if ui.configbool("http_proxy", "always"):
374 self.no_list = []
112 self.no_list = []
375 else:
113 else:
376 self.no_list = no_list
114 self.no_list = no_list
377
115
378 proxyurl = str(proxy)
116 proxyurl = str(proxy)
379 proxies = {'http': proxyurl, 'https': proxyurl}
117 proxies = {'http': proxyurl, 'https': proxyurl}
380 ui.debug('proxying through http://%s:%s\n' %
118 ui.debug('proxying through http://%s:%s\n' %
381 (proxy.host, proxy.port))
119 (proxy.host, proxy.port))
382 else:
120 else:
383 proxies = {}
121 proxies = {}
384
122
385 # urllib2 takes proxy values from the environment and those
123 # urllib2 takes proxy values from the environment and those
386 # will take precedence if found, so drop them
124 # will take precedence if found, so drop them
387 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
125 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
388 try:
126 try:
389 if env in os.environ:
127 if env in os.environ:
390 del os.environ[env]
128 del os.environ[env]
391 except OSError:
129 except OSError:
392 pass
130 pass
393
131
394 urllib2.ProxyHandler.__init__(self, proxies)
132 urllib2.ProxyHandler.__init__(self, proxies)
395 self.ui = ui
133 self.ui = ui
396
134
397 def proxy_open(self, req, proxy, type_):
135 def proxy_open(self, req, proxy, type_):
398 host = req.get_host().split(':')[0]
136 host = req.get_host().split(':')[0]
399 if host in self.no_list:
137 if host in self.no_list:
400 return None
138 return None
401
139
402 # work around a bug in Python < 2.4.2
140 # work around a bug in Python < 2.4.2
403 # (it leaves a "\n" at the end of Proxy-authorization headers)
141 # (it leaves a "\n" at the end of Proxy-authorization headers)
404 baseclass = req.__class__
142 baseclass = req.__class__
405 class _request(baseclass):
143 class _request(baseclass):
406 def add_header(self, key, val):
144 def add_header(self, key, val):
407 if key.lower() == 'proxy-authorization':
145 if key.lower() == 'proxy-authorization':
408 val = val.strip()
146 val = val.strip()
409 return baseclass.add_header(self, key, val)
147 return baseclass.add_header(self, key, val)
410 req.__class__ = _request
148 req.__class__ = _request
411
149
412 return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_)
150 return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_)
413
151
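A hedged sketch of how the bypass list built above is consumed (hostnames are examples):

    no_list = ['localhost', '127.0.0.1']
    no_list.extend(['hg.intranet.example.com'])            # from http_proxy.no
    no_list.extend(p.strip().lower()
                   for p in 'example.org, example.net'.split(',') if p.strip())
    # proxy_open() returns None - i.e. no proxying - whenever the request's
    # host, with any port stripped, appears in no_list.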
414 class httpsendfile(object):
152 class httpsendfile(object):
415 """This is a wrapper around the objects returned by python's "open".
153 """This is a wrapper around the objects returned by python's "open".
416
154
417 Its purpose is to send file-like objects via HTTP and, to do so, it
155 Its purpose is to send file-like objects via HTTP and, to do so, it
418 defines a __len__ attribute to feed the Content-Length header.
156 defines a __len__ attribute to feed the Content-Length header.
419 """
157 """
420
158
421 def __init__(self, ui, *args, **kwargs):
159 def __init__(self, ui, *args, **kwargs):
422 # We can't just "self._data = open(*args, **kwargs)" here because there
160 # We can't just "self._data = open(*args, **kwargs)" here because there
423 # is an "open" function defined in this module that shadows the global
161 # is an "open" function defined in this module that shadows the global
424 # one
162 # one
425 self.ui = ui
163 self.ui = ui
426 self._data = __builtin__.open(*args, **kwargs)
164 self._data = __builtin__.open(*args, **kwargs)
427 self.seek = self._data.seek
165 self.seek = self._data.seek
428 self.close = self._data.close
166 self.close = self._data.close
429 self.write = self._data.write
167 self.write = self._data.write
430 self._len = os.fstat(self._data.fileno()).st_size
168 self._len = os.fstat(self._data.fileno()).st_size
431 self._pos = 0
169 self._pos = 0
432 self._total = len(self) / 1024 * 2
170 self._total = len(self) / 1024 * 2
433
171
434 def read(self, *args, **kwargs):
172 def read(self, *args, **kwargs):
435 try:
173 try:
436 ret = self._data.read(*args, **kwargs)
174 ret = self._data.read(*args, **kwargs)
437 except EOFError:
175 except EOFError:
438 self.ui.progress(_('sending'), None)
176 self.ui.progress(_('sending'), None)
439 self._pos += len(ret)
177 self._pos += len(ret)
440 # We pass double the max for total because we currently have
178 # We pass double the max for total because we currently have
441 # to send the bundle twice in the case of a server that
179 # to send the bundle twice in the case of a server that
442 # requires authentication. Since we can't know until we try
180 # requires authentication. Since we can't know until we try
443 # once whether authentication will be required, just lie to
181 # once whether authentication will be required, just lie to
444 # the user and maybe the push succeeds suddenly at 50%.
182 # the user and maybe the push succeeds suddenly at 50%.
445 self.ui.progress(_('sending'), self._pos / 1024,
183 self.ui.progress(_('sending'), self._pos / 1024,
446 unit=_('kb'), total=self._total)
184 unit=_('kb'), total=self._total)
447 return ret
185 return ret
448
186
449 def __len__(self):
187 def __len__(self):
450 return self._len
188 return self._len
451
189
452 def _gen_sendfile(orgsend):
190 def _gen_sendfile(orgsend):
453 def _sendfile(self, data):
191 def _sendfile(self, data):
454 # send a file
192 # send a file
455 if isinstance(data, httpsendfile):
193 if isinstance(data, httpsendfile):
456 # if auth required, some data sent twice, so rewind here
194 # if auth required, some data sent twice, so rewind here
457 data.seek(0)
195 data.seek(0)
458 for chunk in util.filechunkiter(data):
196 for chunk in util.filechunkiter(data):
459 orgsend(self, chunk)
197 orgsend(self, chunk)
460 else:
198 else:
461 orgsend(self, data)
199 orgsend(self, data)
462 return _sendfile
200 return _sendfile
463
201
464 has_https = hasattr(urllib2, 'HTTPSHandler')
202 has_https = hasattr(urllib2, 'HTTPSHandler')
465 if has_https:
203 if has_https:
466 try:
204 try:
467 # avoid using deprecated/broken FakeSocket in python 2.6
205 # avoid using deprecated/broken FakeSocket in python 2.6
468 import ssl
206 import ssl
469 _ssl_wrap_socket = ssl.wrap_socket
207 _ssl_wrap_socket = ssl.wrap_socket
470 CERT_REQUIRED = ssl.CERT_REQUIRED
208 CERT_REQUIRED = ssl.CERT_REQUIRED
471 except ImportError:
209 except ImportError:
472 CERT_REQUIRED = 2
210 CERT_REQUIRED = 2
473
211
474 def _ssl_wrap_socket(sock, key_file, cert_file,
212 def _ssl_wrap_socket(sock, key_file, cert_file,
475 cert_reqs=CERT_REQUIRED, ca_certs=None):
213 cert_reqs=CERT_REQUIRED, ca_certs=None):
476 if ca_certs:
214 if ca_certs:
477 raise util.Abort(_(
215 raise util.Abort(_(
478 'certificate checking requires Python 2.6'))
216 'certificate checking requires Python 2.6'))
479
217
480 ssl = socket.ssl(sock, key_file, cert_file)
218 ssl = socket.ssl(sock, key_file, cert_file)
481 return httplib.FakeSocket(sock, ssl)
219 return httplib.FakeSocket(sock, ssl)
482
220
483 try:
221 try:
484 _create_connection = socket.create_connection
222 _create_connection = socket.create_connection
485 except AttributeError:
223 except AttributeError:
486 _GLOBAL_DEFAULT_TIMEOUT = object()
224 _GLOBAL_DEFAULT_TIMEOUT = object()
487
225
488 def _create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
226 def _create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
489 source_address=None):
227 source_address=None):
490 # lifted from Python 2.6
228 # lifted from Python 2.6
491
229
492 msg = "getaddrinfo returns an empty list"
230 msg = "getaddrinfo returns an empty list"
493 host, port = address
231 host, port = address
494 for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
232 for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
495 af, socktype, proto, canonname, sa = res
233 af, socktype, proto, canonname, sa = res
496 sock = None
234 sock = None
497 try:
235 try:
498 sock = socket.socket(af, socktype, proto)
236 sock = socket.socket(af, socktype, proto)
499 if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
237 if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
500 sock.settimeout(timeout)
238 sock.settimeout(timeout)
501 if source_address:
239 if source_address:
502 sock.bind(source_address)
240 sock.bind(source_address)
503 sock.connect(sa)
241 sock.connect(sa)
504 return sock
242 return sock
505
243
506 except socket.error, msg:
244 except socket.error, msg:
507 if sock is not None:
245 if sock is not None:
508 sock.close()
246 sock.close()
509
247
510 raise socket.error, msg
248 raise socket.error, msg
511
249
512 class httpconnection(keepalive.HTTPConnection):
250 class httpconnection(keepalive.HTTPConnection):
513 # must be able to send big bundle as stream.
251 # must be able to send big bundle as stream.
514 send = _gen_sendfile(keepalive.HTTPConnection.send)
252 send = _gen_sendfile(keepalive.HTTPConnection.send)
515
253
516 def connect(self):
254 def connect(self):
517 if has_https and self.realhostport: # use CONNECT proxy
255 if has_https and self.realhostport: # use CONNECT proxy
518 self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
256 self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
519 self.sock.connect((self.host, self.port))
257 self.sock.connect((self.host, self.port))
520 if _generic_proxytunnel(self):
258 if _generic_proxytunnel(self):
521 # we do not support client x509 certificates
259 # we do not support client x509 certificates
522 self.sock = _ssl_wrap_socket(self.sock, None, None)
260 self.sock = _ssl_wrap_socket(self.sock, None, None)
523 else:
261 else:
524 keepalive.HTTPConnection.connect(self)
262 keepalive.HTTPConnection.connect(self)
525
263
526 def getresponse(self):
264 def getresponse(self):
527 proxyres = getattr(self, 'proxyres', None)
265 proxyres = getattr(self, 'proxyres', None)
528 if proxyres:
266 if proxyres:
529 if proxyres.will_close:
267 if proxyres.will_close:
530 self.close()
268 self.close()
531 self.proxyres = None
269 self.proxyres = None
532 return proxyres
270 return proxyres
533 return keepalive.HTTPConnection.getresponse(self)
271 return keepalive.HTTPConnection.getresponse(self)
534
272
535 # general transaction handler to support different ways to handle
273 # general transaction handler to support different ways to handle
536 # HTTPS proxying before and after Python 2.6.3.
274 # HTTPS proxying before and after Python 2.6.3.
537 def _generic_start_transaction(handler, h, req):
275 def _generic_start_transaction(handler, h, req):
538 if hasattr(req, '_tunnel_host') and req._tunnel_host:
276 if hasattr(req, '_tunnel_host') and req._tunnel_host:
539 tunnel_host = req._tunnel_host
277 tunnel_host = req._tunnel_host
540 if tunnel_host[:7] not in ['http://', 'https:/']:
278 if tunnel_host[:7] not in ['http://', 'https:/']:
541 tunnel_host = 'https://' + tunnel_host
279 tunnel_host = 'https://' + tunnel_host
542 new_tunnel = True
280 new_tunnel = True
543 else:
281 else:
544 tunnel_host = req.get_selector()
282 tunnel_host = req.get_selector()
545 new_tunnel = False
283 new_tunnel = False
546
284
547 if new_tunnel or tunnel_host == req.get_full_url(): # has proxy
285 if new_tunnel or tunnel_host == req.get_full_url(): # has proxy
548 u = url(tunnel_host)
286 u = util.url(tunnel_host)
549 if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS
287 if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS
550 h.realhostport = ':'.join([u.host, (u.port or '443')])
288 h.realhostport = ':'.join([u.host, (u.port or '443')])
551 h.headers = req.headers.copy()
289 h.headers = req.headers.copy()
552 h.headers.update(handler.parent.addheaders)
290 h.headers.update(handler.parent.addheaders)
553 return
291 return
554
292
555 h.realhostport = None
293 h.realhostport = None
556 h.headers = None
294 h.headers = None
557
295
558 def _generic_proxytunnel(self):
296 def _generic_proxytunnel(self):
559 proxyheaders = dict(
297 proxyheaders = dict(
560 [(x, self.headers[x]) for x in self.headers
298 [(x, self.headers[x]) for x in self.headers
561 if x.lower().startswith('proxy-')])
299 if x.lower().startswith('proxy-')])
562 self._set_hostport(self.host, self.port)
300 self._set_hostport(self.host, self.port)
563 self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
301 self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
564 for header in proxyheaders.iteritems():
302 for header in proxyheaders.iteritems():
565 self.send('%s: %s\r\n' % header)
303 self.send('%s: %s\r\n' % header)
566 self.send('\r\n')
304 self.send('\r\n')
567
305
568 # majority of the following code is duplicated from
306 # majority of the following code is duplicated from
569 # httplib.HTTPConnection as there are no adequate places to
307 # httplib.HTTPConnection as there are no adequate places to
570 # override functions to provide the needed functionality
308 # override functions to provide the needed functionality
571 res = self.response_class(self.sock,
309 res = self.response_class(self.sock,
572 strict=self.strict,
310 strict=self.strict,
573 method=self._method)
311 method=self._method)
574
312
575 while True:
313 while True:
576 version, status, reason = res._read_status()
314 version, status, reason = res._read_status()
577 if status != httplib.CONTINUE:
315 if status != httplib.CONTINUE:
578 break
316 break
579 while True:
317 while True:
580 skip = res.fp.readline().strip()
318 skip = res.fp.readline().strip()
581 if not skip:
319 if not skip:
582 break
320 break
583 res.status = status
321 res.status = status
584 res.reason = reason.strip()
322 res.reason = reason.strip()
585
323
586 if res.status == 200:
324 if res.status == 200:
587 while True:
325 while True:
588 line = res.fp.readline()
326 line = res.fp.readline()
589 if line == '\r\n':
327 if line == '\r\n':
590 break
328 break
591 return True
329 return True
592
330
593 if version == 'HTTP/1.0':
331 if version == 'HTTP/1.0':
594 res.version = 10
332 res.version = 10
595 elif version.startswith('HTTP/1.'):
333 elif version.startswith('HTTP/1.'):
596 res.version = 11
334 res.version = 11
597 elif version == 'HTTP/0.9':
335 elif version == 'HTTP/0.9':
598 res.version = 9
336 res.version = 9
599 else:
337 else:
600 raise httplib.UnknownProtocol(version)
338 raise httplib.UnknownProtocol(version)
601
339
602 if res.version == 9:
340 if res.version == 9:
603 res.length = None
341 res.length = None
604 res.chunked = 0
342 res.chunked = 0
605 res.will_close = 1
343 res.will_close = 1
606 res.msg = httplib.HTTPMessage(cStringIO.StringIO())
344 res.msg = httplib.HTTPMessage(cStringIO.StringIO())
607 return False
345 return False
608
346
609 res.msg = httplib.HTTPMessage(res.fp)
347 res.msg = httplib.HTTPMessage(res.fp)
610 res.msg.fp = None
348 res.msg.fp = None
611
349
612 # are we using the chunked-style of transfer encoding?
350 # are we using the chunked-style of transfer encoding?
613 trenc = res.msg.getheader('transfer-encoding')
351 trenc = res.msg.getheader('transfer-encoding')
614 if trenc and trenc.lower() == "chunked":
352 if trenc and trenc.lower() == "chunked":
615 res.chunked = 1
353 res.chunked = 1
616 res.chunk_left = None
354 res.chunk_left = None
617 else:
355 else:
618 res.chunked = 0
356 res.chunked = 0
619
357
620 # will the connection close at the end of the response?
358 # will the connection close at the end of the response?
621 res.will_close = res._check_close()
359 res.will_close = res._check_close()
622
360
623 # do we have a Content-Length?
361 # do we have a Content-Length?
624 # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
362 # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
625 length = res.msg.getheader('content-length')
363 length = res.msg.getheader('content-length')
626 if length and not res.chunked:
364 if length and not res.chunked:
627 try:
365 try:
628 res.length = int(length)
366 res.length = int(length)
629 except ValueError:
367 except ValueError:
630 res.length = None
368 res.length = None
631 else:
369 else:
632 if res.length < 0: # ignore nonsensical negative lengths
370 if res.length < 0: # ignore nonsensical negative lengths
633 res.length = None
371 res.length = None
634 else:
372 else:
635 res.length = None
373 res.length = None
636
374
637 # does the body have a fixed length? (of zero)
375 # does the body have a fixed length? (of zero)
638 if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
376 if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
639 100 <= status < 200 or # 1xx codes
377 100 <= status < 200 or # 1xx codes
640 res._method == 'HEAD'):
378 res._method == 'HEAD'):
641 res.length = 0
379 res.length = 0
642
380
643 # if the connection remains open, and we aren't using chunked, and
381 # if the connection remains open, and we aren't using chunked, and
644 # a content-length was not provided, then assume that the connection
382 # a content-length was not provided, then assume that the connection
645 # WILL close.
383 # WILL close.
646 if (not res.will_close and
384 if (not res.will_close and
647 not res.chunked and
385 not res.chunked and
648 res.length is None):
386 res.length is None):
649 res.will_close = 1
387 res.will_close = 1
650
388
651 self.proxyres = res
389 self.proxyres = res
652
390
653 return False
391 return False
654
392
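For reference, a hedged sketch of what the tunnel handshake above puts on the wire (host, port and the Proxy-Authorization value are examples):

    request = ('CONNECT hg.example.com:443 HTTP/1.0\r\n'
               'Proxy-Authorization: Basic am9lOnNlY3JldA==\r\n'
               '\r\n')
    # Anything other than a 200 status line from the proxy means the tunnel
    # failed; the code above then rebuilds an HTTPResponse from what was read
    # and stashes it as self.proxyres for getresponse() to hand back.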
655 class httphandler(keepalive.HTTPHandler):
393 class httphandler(keepalive.HTTPHandler):
656 def http_open(self, req):
394 def http_open(self, req):
657 return self.do_open(httpconnection, req)
395 return self.do_open(httpconnection, req)
658
396
659 def _start_transaction(self, h, req):
397 def _start_transaction(self, h, req):
660 _generic_start_transaction(self, h, req)
398 _generic_start_transaction(self, h, req)
661 return keepalive.HTTPHandler._start_transaction(self, h, req)
399 return keepalive.HTTPHandler._start_transaction(self, h, req)
662
400
663 def _verifycert(cert, hostname):
401 def _verifycert(cert, hostname):
664 '''Verify that cert (in socket.getpeercert() format) matches hostname.
402 '''Verify that cert (in socket.getpeercert() format) matches hostname.
665 CRLs are not handled.
403 CRLs are not handled.
666
404
667 Returns an error message if any problems are found and None on success.
405 Returns an error message if any problems are found and None on success.
668 '''
406 '''
669 if not cert:
407 if not cert:
670 return _('no certificate received')
408 return _('no certificate received')
671 dnsname = hostname.lower()
409 dnsname = hostname.lower()
672 def matchdnsname(certname):
410 def matchdnsname(certname):
673 return (certname == dnsname or
411 return (certname == dnsname or
674 '.' in dnsname and certname == '*.' + dnsname.split('.', 1)[1])
412 '.' in dnsname and certname == '*.' + dnsname.split('.', 1)[1])
675
413
676 san = cert.get('subjectAltName', [])
414 san = cert.get('subjectAltName', [])
677 if san:
415 if san:
678 certnames = [value.lower() for key, value in san if key == 'DNS']
416 certnames = [value.lower() for key, value in san if key == 'DNS']
679 for name in certnames:
417 for name in certnames:
680 if matchdnsname(name):
418 if matchdnsname(name):
681 return None
419 return None
682 return _('certificate is for %s') % ', '.join(certnames)
420 return _('certificate is for %s') % ', '.join(certnames)
683
421
684 # subject is only checked when subjectAltName is empty
422 # subject is only checked when subjectAltName is empty
685 for s in cert.get('subject', []):
423 for s in cert.get('subject', []):
686 key, value = s[0]
424 key, value = s[0]
687 if key == 'commonName':
425 if key == 'commonName':
688 try:
426 try:
689 # 'subject' entries are unicode
427 # 'subject' entries are unicode
690 certname = value.lower().encode('ascii')
428 certname = value.lower().encode('ascii')
691 except UnicodeEncodeError:
429 except UnicodeEncodeError:
692 return _('IDN in certificate not supported')
430 return _('IDN in certificate not supported')
693 if matchdnsname(certname):
431 if matchdnsname(certname):
694 return None
432 return None
695 return _('certificate is for %s') % certname
433 return _('certificate is for %s') % certname
696 return _('no commonName or subjectAltName found in certificate')
434 return _('no commonName or subjectAltName found in certificate')
697
435
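A hedged sketch of the wildcard rule in matchdnsname() above: '*.example.com' covers exactly one additional label, because only the first label of the looked-up hostname is replaced:

    def matchdnsname(certname, dnsname):
        return (certname == dnsname or
                '.' in dnsname and certname == '*.' + dnsname.split('.', 1)[1])

    matchdnsname('*.example.com', 'hg.example.com')     # True
    matchdnsname('*.example.com', 'a.hg.example.com')   # False - two labels deep
    matchdnsname('*.example.com', 'example.com')        # False - no extra label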
698 if has_https:
436 if has_https:
699 class httpsconnection(httplib.HTTPSConnection):
437 class httpsconnection(httplib.HTTPSConnection):
700 response_class = keepalive.HTTPResponse
438 response_class = keepalive.HTTPResponse
701 # must be able to send big bundle as stream.
439 # must be able to send big bundle as stream.
702 send = _gen_sendfile(keepalive.safesend)
440 send = _gen_sendfile(keepalive.safesend)
703 getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection)
441 getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection)
704
442
705 def connect(self):
443 def connect(self):
706 self.sock = _create_connection((self.host, self.port))
444 self.sock = _create_connection((self.host, self.port))
707
445
708 host = self.host
446 host = self.host
709 if self.realhostport: # use CONNECT proxy
447 if self.realhostport: # use CONNECT proxy
710 _generic_proxytunnel(self)
448 _generic_proxytunnel(self)
711 host = self.realhostport.rsplit(':', 1)[0]
449 host = self.realhostport.rsplit(':', 1)[0]
712
450
713 cacerts = self.ui.config('web', 'cacerts')
451 cacerts = self.ui.config('web', 'cacerts')
714 hostfingerprint = self.ui.config('hostfingerprints', host)
452 hostfingerprint = self.ui.config('hostfingerprints', host)
715
453
716 if cacerts and not hostfingerprint:
454 if cacerts and not hostfingerprint:
717 cacerts = util.expandpath(cacerts)
455 cacerts = util.expandpath(cacerts)
718 if not os.path.exists(cacerts):
456 if not os.path.exists(cacerts):
719 raise util.Abort(_('could not find '
457 raise util.Abort(_('could not find '
720 'web.cacerts: %s') % cacerts)
458 'web.cacerts: %s') % cacerts)
721 self.sock = _ssl_wrap_socket(self.sock, self.key_file,
459 self.sock = _ssl_wrap_socket(self.sock, self.key_file,
722 self.cert_file, cert_reqs=CERT_REQUIRED,
460 self.cert_file, cert_reqs=CERT_REQUIRED,
723 ca_certs=cacerts)
461 ca_certs=cacerts)
724 msg = _verifycert(self.sock.getpeercert(), host)
462 msg = _verifycert(self.sock.getpeercert(), host)
725 if msg:
463 if msg:
726 raise util.Abort(_('%s certificate error: %s '
464 raise util.Abort(_('%s certificate error: %s '
727 '(use --insecure to connect '
465 '(use --insecure to connect '
728 'insecurely)') % (host, msg))
466 'insecurely)') % (host, msg))
729 self.ui.debug('%s certificate successfully verified\n' % host)
467 self.ui.debug('%s certificate successfully verified\n' % host)
730 else:
468 else:
731 self.sock = _ssl_wrap_socket(self.sock, self.key_file,
469 self.sock = _ssl_wrap_socket(self.sock, self.key_file,
732 self.cert_file)
470 self.cert_file)
733 if hasattr(self.sock, 'getpeercert'):
471 if hasattr(self.sock, 'getpeercert'):
734 peercert = self.sock.getpeercert(True)
472 peercert = self.sock.getpeercert(True)
735 peerfingerprint = util.sha1(peercert).hexdigest()
473 peerfingerprint = util.sha1(peercert).hexdigest()
736 nicefingerprint = ":".join([peerfingerprint[x:x + 2]
474 nicefingerprint = ":".join([peerfingerprint[x:x + 2]
737 for x in xrange(0, len(peerfingerprint), 2)])
475 for x in xrange(0, len(peerfingerprint), 2)])
738 if hostfingerprint:
476 if hostfingerprint:
739 if peerfingerprint.lower() != \
477 if peerfingerprint.lower() != \
740 hostfingerprint.replace(':', '').lower():
478 hostfingerprint.replace(':', '').lower():
741 raise util.Abort(_('invalid certificate for %s '
479 raise util.Abort(_('invalid certificate for %s '
742 'with fingerprint %s') %
480 'with fingerprint %s') %
743 (host, nicefingerprint))
481 (host, nicefingerprint))
744 self.ui.debug('%s certificate matched fingerprint %s\n' %
482 self.ui.debug('%s certificate matched fingerprint %s\n' %
745 (host, nicefingerprint))
483 (host, nicefingerprint))
746 else:
484 else:
747 self.ui.warn(_('warning: %s certificate '
485 self.ui.warn(_('warning: %s certificate '
748 'with fingerprint %s not verified '
486 'with fingerprint %s not verified '
749 '(check hostfingerprints or web.cacerts '
487 '(check hostfingerprints or web.cacerts '
750 'config setting)\n') %
488 'config setting)\n') %
751 (host, nicefingerprint))
489 (host, nicefingerprint))
752 else: # python 2.5 ?
490 else: # python 2.5 ?
753 if hostfingerprint:
491 if hostfingerprint:
754 raise util.Abort(_('no certificate for %s with '
492 raise util.Abort(_('no certificate for %s with '
755 'configured hostfingerprint') % host)
493 'configured hostfingerprint') % host)
756 self.ui.warn(_('warning: %s certificate not verified '
494 self.ui.warn(_('warning: %s certificate not verified '
757 '(check web.cacerts config setting)\n') %
495 '(check web.cacerts config setting)\n') %
758 host)
496 host)
759
497
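A hedged sketch of how the fingerprint compared above is formed (util.sha1 wraps hashlib.sha1; the byte-pair grouping produces the familiar colon-separated form, and hostfingerprints entries are compared case-insensitively with colons removed):

    import hashlib

    def nicefingerprint(dercert):
        hexdigest = hashlib.sha1(dercert).hexdigest()
        return ':'.join(hexdigest[i:i + 2] for i in xrange(0, len(hexdigest), 2))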
760 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
498 class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
761 def __init__(self, ui):
499 def __init__(self, ui):
762 keepalive.KeepAliveHandler.__init__(self)
500 keepalive.KeepAliveHandler.__init__(self)
763 urllib2.HTTPSHandler.__init__(self)
501 urllib2.HTTPSHandler.__init__(self)
764 self.ui = ui
502 self.ui = ui
765 self.pwmgr = passwordmgr(self.ui)
503 self.pwmgr = passwordmgr(self.ui)
766
504
767 def _start_transaction(self, h, req):
505 def _start_transaction(self, h, req):
768 _generic_start_transaction(self, h, req)
506 _generic_start_transaction(self, h, req)
769 return keepalive.KeepAliveHandler._start_transaction(self, h, req)
507 return keepalive.KeepAliveHandler._start_transaction(self, h, req)
770
508
771 def https_open(self, req):
509 def https_open(self, req):
772 res = readauthforuri(self.ui, req.get_full_url())
510 res = readauthforuri(self.ui, req.get_full_url())
773 if res:
511 if res:
774 group, auth = res
512 group, auth = res
775 self.auth = auth
513 self.auth = auth
776 self.ui.debug("using auth.%s.* for authentication\n" % group)
514 self.ui.debug("using auth.%s.* for authentication\n" % group)
777 else:
515 else:
778 self.auth = None
516 self.auth = None
779 return self.do_open(self._makeconnection, req)
517 return self.do_open(self._makeconnection, req)
780
518
781 def _makeconnection(self, host, port=None, *args, **kwargs):
519 def _makeconnection(self, host, port=None, *args, **kwargs):
782 keyfile = None
520 keyfile = None
783 certfile = None
521 certfile = None
784
522
785 if len(args) >= 1: # key_file
523 if len(args) >= 1: # key_file
786 keyfile = args[0]
524 keyfile = args[0]
787 if len(args) >= 2: # cert_file
525 if len(args) >= 2: # cert_file
788 certfile = args[1]
526 certfile = args[1]
789 args = args[2:]
527 args = args[2:]
790
528
791 # if the user has specified different key/cert files in
529 # if the user has specified different key/cert files in
792 # hgrc, we prefer these
530 # hgrc, we prefer these
793 if self.auth and 'key' in self.auth and 'cert' in self.auth:
531 if self.auth and 'key' in self.auth and 'cert' in self.auth:
794 keyfile = self.auth['key']
532 keyfile = self.auth['key']
795 certfile = self.auth['cert']
533 certfile = self.auth['cert']
796
534
797 conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
535 conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
798 conn.ui = self.ui
536 conn.ui = self.ui
799 return conn
537 return conn
800
538
801 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
539 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
802 def __init__(self, *args, **kwargs):
540 def __init__(self, *args, **kwargs):
803 urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs)
541 urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs)
804 self.retried_req = None
542 self.retried_req = None
805
543
806 def reset_retry_count(self):
544 def reset_retry_count(self):
807 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
545 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
808 # forever. We disable reset_retry_count completely and reset in
546 # forever. We disable reset_retry_count completely and reset in
809 # http_error_auth_reqed instead.
547 # http_error_auth_reqed instead.
810 pass
548 pass
811
549
812 def http_error_auth_reqed(self, auth_header, host, req, headers):
550 def http_error_auth_reqed(self, auth_header, host, req, headers):
813 # Reset the retry counter once for each request.
551 # Reset the retry counter once for each request.
814 if req is not self.retried_req:
552 if req is not self.retried_req:
815 self.retried_req = req
553 self.retried_req = req
816 self.retried = 0
554 self.retried = 0
817 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
555 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
818 # it doesn't know about the auth type requested. This can happen if
556 # it doesn't know about the auth type requested. This can happen if
819 # somebody is using BasicAuth and types a bad password.
557 # somebody is using BasicAuth and types a bad password.
820 try:
558 try:
821 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
559 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
822 self, auth_header, host, req, headers)
560 self, auth_header, host, req, headers)
823 except ValueError, inst:
561 except ValueError, inst:
824 arg = inst.args[0]
562 arg = inst.args[0]
825 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
563 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
826 return
564 return
827 raise
565 raise
828
566
829 class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler):
567 class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler):
830 def __init__(self, *args, **kwargs):
568 def __init__(self, *args, **kwargs):
831 urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
569 urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
832 self.retried_req = None
570 self.retried_req = None
833
571
834 def reset_retry_count(self):
572 def reset_retry_count(self):
835 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
573 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
836 # forever. We disable reset_retry_count completely and reset in
574 # forever. We disable reset_retry_count completely and reset in
837 # http_error_auth_reqed instead.
575 # http_error_auth_reqed instead.
838 pass
576 pass
839
577
840 def http_error_auth_reqed(self, auth_header, host, req, headers):
578 def http_error_auth_reqed(self, auth_header, host, req, headers):
841 # Reset the retry counter once for each request.
579 # Reset the retry counter once for each request.
842 if req is not self.retried_req:
580 if req is not self.retried_req:
843 self.retried_req = req
581 self.retried_req = req
844 self.retried = 0
582 self.retried = 0
845 return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
583 return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
846 self, auth_header, host, req, headers)
584 self, auth_header, host, req, headers)
847
585
848 handlerfuncs = []
586 handlerfuncs = []
849
587
850 def opener(ui, authinfo=None):
588 def opener(ui, authinfo=None):
851 '''
589 '''
852 construct an opener suitable for urllib2
590 construct an opener suitable for urllib2
853 authinfo will be added to the password manager
591 authinfo will be added to the password manager
854 '''
592 '''
855 handlers = [httphandler()]
593 handlers = [httphandler()]
856 if has_https:
594 if has_https:
857 handlers.append(httpshandler(ui))
595 handlers.append(httpshandler(ui))
858
596
859 handlers.append(proxyhandler(ui))
597 handlers.append(proxyhandler(ui))
860
598
861 passmgr = passwordmgr(ui)
599 passmgr = passwordmgr(ui)
862 if authinfo is not None:
600 if authinfo is not None:
863 passmgr.add_password(*authinfo)
601 passmgr.add_password(*authinfo)
864 user, passwd = authinfo[2:4]
602 user, passwd = authinfo[2:4]
865 ui.debug('http auth: user %s, password %s\n' %
603 ui.debug('http auth: user %s, password %s\n' %
866 (user, passwd and '*' * len(passwd) or 'not set'))
604 (user, passwd and '*' * len(passwd) or 'not set'))
867
605
868 handlers.extend((httpbasicauthhandler(passmgr),
606 handlers.extend((httpbasicauthhandler(passmgr),
869 httpdigestauthhandler(passmgr)))
607 httpdigestauthhandler(passmgr)))
870 handlers.extend([h(ui, passmgr) for h in handlerfuncs])
608 handlers.extend([h(ui, passmgr) for h in handlerfuncs])
871 opener = urllib2.build_opener(*handlers)
609 opener = urllib2.build_opener(*handlers)
872
610
873 # 1.0 here is the _protocol_ version
611 # 1.0 here is the _protocol_ version
874 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
612 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
875 opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
613 opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
876 return opener
614 return opener
877
615
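Hedged usage sketch tying opener() back to authinfo(): the tuple returned there has exactly the shape add_password(*authinfo) expects (module layout assumes the post-move util.url; the URL is an example):

    from mercurial import ui as uimod, url as urlmod, util

    u = util.url('https://joe:secret@example.com/repo')
    cleanurl, authinfo = u.authinfo()
    urlopener = urlmod.opener(uimod.ui(), authinfo)
    # urlopener.open(cleanurl) can now answer a basic or digest challenge
    # with joe/secret via the password manager.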
878 def open(ui, url_, data=None):
616 def open(ui, url_, data=None):
879 u = url(url_)
617 u = util.url(url_)
880 if u.scheme:
618 if u.scheme:
881 u.scheme = u.scheme.lower()
619 u.scheme = u.scheme.lower()
882 url_, authinfo = u.authinfo()
620 url_, authinfo = u.authinfo()
883 else:
621 else:
884 path = util.normpath(os.path.abspath(url_))
622 path = util.normpath(os.path.abspath(url_))
885 url_ = 'file://' + urllib.pathname2url(path)
623 url_ = 'file://' + urllib.pathname2url(path)
886 authinfo = None
624 authinfo = None
887 return opener(ui, authinfo).open(url_, data)
625 return opener(ui, authinfo).open(url_, data)
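Finally, a hedged sketch of open() itself, which accepts either a full URL or a bare local path (the URL is illustrative):

    from mercurial import ui as uimod, url as urlmod

    fp = urlmod.open(uimod.ui(), 'https://example.com/repo/raw-file/tip/README')
    data = fp.read()
    # a plain filesystem path would instead be rewritten to a file:// URL first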