py3: replace os.environ with encoding.environ (part 2 of 5)
Pulkit Goyal
r30635:a150173d default
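Background for this series: on Python 3, os.environ holds unicode str keys and values, while Mercurial works with byte strings internally; mercurial.encoding.environ exposes the environment as bytes on both Python versions so callers can keep using bytes. The following standalone sketch is an illustration of that idea only, with simplified fallbacks, not Mercurial's actual implementation:

# Simplified sketch of a bytes-keyed environment mapping, similar in spirit
# to mercurial.encoding.environ (illustration only; the real module also
# handles Windows and locale/filesystem encodings).
import os
import sys

if sys.version_info[0] >= 3 and hasattr(os, 'environb'):
    environ = os.environb          # POSIX Python 3: bytes keys and values
elif sys.version_info[0] >= 3:
    environ = {k.encode('utf-8'): v.encode('utf-8')
               for k, v in os.environ.items()}
else:
    environ = os.environ           # Python 2: already byte strings

pager = environ.get(b'PAGER')      # byte-string lookup works on both versions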
@@ -1,175 +1,176 @@
 # pager.py - display output using a pager
 #
 # Copyright 2008 David Soria Parra <dsp@php.net>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 #
 # To load the extension, add it to your configuration file:
 #
 # [extension]
 # pager =
 #
 # Run 'hg help pager' to get info on configuration.

 '''browse command output with an external pager

 To set the pager that should be used, set the application variable::

   [pager]
   pager = less -FRX

 If no pager is set, the pager extensions uses the environment variable
 $PAGER. If neither pager.pager, nor $PAGER is set, no pager is used.

 You can disable the pager for certain commands by adding them to the
 pager.ignore list::

   [pager]
   ignore = version, help, update

 You can also enable the pager only for certain commands using
 pager.attend. Below is the default list of commands to be paged::

   [pager]
   attend = annotate, cat, diff, export, glog, log, qdiff

 Setting pager.attend to an empty value will cause all commands to be
 paged.

 If pager.attend is present, pager.ignore will be ignored.

 Lastly, you can enable and disable paging for individual commands with
 the attend-<command> option. This setting takes precedence over
 existing attend and ignore options and defaults::

   [pager]
   attend-cat = false

 To ignore global commands like :hg:`version` or :hg:`help`, you have
 to specify them in your user configuration file.

 To control whether the pager is used at all for an individual command,
 you can use --pager=<value>::

   - use as needed: `auto`.
   - require the pager: `yes` or `on`.
   - suppress the pager: `no` or `off` (any unrecognized value
     will also work).

 '''
 from __future__ import absolute_import

 import atexit
 import os
 import signal
 import subprocess
 import sys

 from mercurial.i18n import _
 from mercurial import (
     cmdutil,
     commands,
     dispatch,
+    encoding,
     extensions,
     util,
 )

 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'

 def _runpager(ui, p):
     pager = subprocess.Popen(p, shell=True, bufsize=-1,
                              close_fds=util.closefds, stdin=subprocess.PIPE,
                              stdout=util.stdout, stderr=util.stderr)

     # back up original file objects and descriptors
     olduifout = ui.fout
     oldstdout = util.stdout
     stdoutfd = os.dup(util.stdout.fileno())
     stderrfd = os.dup(util.stderr.fileno())

     # create new line-buffered stdout so that output can show up immediately
     ui.fout = util.stdout = newstdout = os.fdopen(util.stdout.fileno(), 'wb', 1)
     os.dup2(pager.stdin.fileno(), util.stdout.fileno())
     if ui._isatty(util.stderr):
         os.dup2(pager.stdin.fileno(), util.stderr.fileno())

     @atexit.register
     def killpager():
         if util.safehasattr(signal, "SIGINT"):
             signal.signal(signal.SIGINT, signal.SIG_IGN)
         pager.stdin.close()
         ui.fout = olduifout
         util.stdout = oldstdout
         # close new stdout while it's associated with pager; otherwise stdout
         # fd would be closed when newstdout is deleted
         newstdout.close()
         # restore original fds: stdout is open again
         os.dup2(stdoutfd, util.stdout.fileno())
         os.dup2(stderrfd, util.stderr.fileno())
         pager.wait()

 def uisetup(ui):
     if '--debugger' in sys.argv or not ui.formatted():
         return

     # chg has its own pager implementation
     argv = sys.argv[:]
     if 'chgunix' in dispatch._earlygetopt(['--cmdserver'], argv):
         return

     def pagecmd(orig, ui, options, cmd, cmdfunc):
-        p = ui.config("pager", "pager", os.environ.get("PAGER"))
+        p = ui.config("pager", "pager", encoding.environ.get("PAGER"))
         usepager = False
         always = util.parsebool(options['pager'])
         auto = options['pager'] == 'auto'

         if not p:
             pass
         elif always:
             usepager = True
         elif not auto:
             usepager = False
         else:
             attend = ui.configlist('pager', 'attend', attended)
             ignore = ui.configlist('pager', 'ignore')
             cmds, _ = cmdutil.findcmd(cmd, commands.table)

             for cmd in cmds:
                 var = 'attend-%s' % cmd
                 if ui.config('pager', var):
                     usepager = ui.configbool('pager', var)
                     break
                 if (cmd in attend or
                     (cmd not in ignore and not attend)):
                     usepager = True
                     break

         setattr(ui, 'pageractive', usepager)

         if usepager:
             ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
             ui.setconfig('ui', 'interactive', False, 'pager')
             if util.safehasattr(signal, "SIGPIPE"):
                 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
             _runpager(ui, p)
         return orig(ui, options, cmd, cmdfunc)

     # Wrap dispatch._runcommand after color is loaded so color can see
     # ui.pageractive. Otherwise, if we loaded first, color's wrapped
     # dispatch._runcommand would run without having access to ui.pageractive.
     def afterloaded(loaded):
         extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
     extensions.afterloaded('color', afterloaded)

 def extsetup(ui):
     commands.globalopts.append(
         ('', 'pager', 'auto',
          _("when to paginate (boolean, always, auto, or never)"),
          _('TYPE')))

 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
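The _runpager hook above works by spawning the pager and re-pointing the process-wide stdout file descriptor at the pager's stdin via os.dup2. Below is a minimal standalone sketch of that redirection technique; the name run_with_pager is made up for illustration, and it omits the ui plumbing, stderr handling, and atexit bookkeeping of the real extension:

# Minimal sketch of paging output by redirecting stdout into a pager's stdin
# (simplified compared to _runpager above: no ui object, no stderr handling).
import os
import subprocess
import sys

def run_with_pager(write_output, pager_cmd='less -FRX'):
    pager = subprocess.Popen(pager_cmd, shell=True, stdin=subprocess.PIPE)
    saved_fd = os.dup(sys.stdout.fileno())       # remember the real stdout
    try:
        os.dup2(pager.stdin.fileno(), sys.stdout.fileno())
        write_output()                           # anything printed goes to the pager
        sys.stdout.flush()
    finally:
        os.dup2(saved_fd, sys.stdout.fileno())   # restore the real stdout
        os.close(saved_fd)
        pager.stdin.close()                      # EOF lets the pager exit
        pager.wait()

if __name__ == '__main__':
    run_with_pager(lambda: print('\n'.join(str(i) for i in range(200))))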
@@ -1,724 +1,725 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
47 By default, :hg:`email` will prompt for a ``To`` or ``CC`` header if
48 you do not supply one via configuration or the command line. You can
48 you do not supply one via configuration or the command line. You can
49 override this to never prompt by configuring an empty value::
49 override this to never prompt by configuring an empty value::
50
50
51 [email]
51 [email]
52 cc =
52 cc =
53
53
54 You can control the default inclusion of an introduction message with the
54 You can control the default inclusion of an introduction message with the
55 ``patchbomb.intro`` configuration option. The configuration is always
55 ``patchbomb.intro`` configuration option. The configuration is always
56 overwritten by command line flags like --intro and --desc::
56 overwritten by command line flags like --intro and --desc::
57
57
58 [patchbomb]
58 [patchbomb]
59 intro=auto # include introduction message if more than 1 patch (default)
59 intro=auto # include introduction message if more than 1 patch (default)
60 intro=never # never include an introduction message
60 intro=never # never include an introduction message
61 intro=always # always include an introduction message
61 intro=always # always include an introduction message
62
62
63 You can set patchbomb to always ask for confirmation by setting
63 You can set patchbomb to always ask for confirmation by setting
64 ``patchbomb.confirm`` to true.
64 ``patchbomb.confirm`` to true.
65 '''
65 '''
66 from __future__ import absolute_import
66 from __future__ import absolute_import
67
67
68 import email as emailmod
68 import email as emailmod
69 import errno
69 import errno
70 import os
70 import os
71 import socket
71 import socket
72 import tempfile
72 import tempfile
73
73
74 from mercurial.i18n import _
74 from mercurial.i18n import _
75 from mercurial import (
75 from mercurial import (
76 cmdutil,
76 cmdutil,
77 commands,
77 commands,
78 encoding,
78 error,
79 error,
79 hg,
80 hg,
80 mail,
81 mail,
81 node as nodemod,
82 node as nodemod,
82 patch,
83 patch,
83 scmutil,
84 scmutil,
84 util,
85 util,
85 )
86 )
86 stringio = util.stringio
87 stringio = util.stringio
87
88
88 cmdtable = {}
89 cmdtable = {}
89 command = cmdutil.command(cmdtable)
90 command = cmdutil.command(cmdtable)
90 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
91 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
91 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
92 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
92 # be specifying the version(s) of Mercurial they are tested with, or
93 # be specifying the version(s) of Mercurial they are tested with, or
93 # leave the attribute unspecified.
94 # leave the attribute unspecified.
94 testedwith = 'ships-with-hg-core'
95 testedwith = 'ships-with-hg-core'
95
96
96 def _addpullheader(seq, ctx):
97 def _addpullheader(seq, ctx):
97 """Add a header pointing to a public URL where the changeset is available
98 """Add a header pointing to a public URL where the changeset is available
98 """
99 """
99 repo = ctx.repo()
100 repo = ctx.repo()
100 # experimental config: patchbomb.publicurl
101 # experimental config: patchbomb.publicurl
101 # waiting for some logic that check that the changeset are available on the
102 # waiting for some logic that check that the changeset are available on the
102 # destination before patchbombing anything.
103 # destination before patchbombing anything.
103 pullurl = repo.ui.config('patchbomb', 'publicurl')
104 pullurl = repo.ui.config('patchbomb', 'publicurl')
104 if pullurl is not None:
105 if pullurl is not None:
105 return ('Available At %s\n'
106 return ('Available At %s\n'
106 '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
107 '# hg pull %s -r %s' % (pullurl, pullurl, ctx))
107 return None
108 return None
108
109
109 def uisetup(ui):
110 def uisetup(ui):
110 cmdutil.extraexport.append('pullurl')
111 cmdutil.extraexport.append('pullurl')
111 cmdutil.extraexportmap['pullurl'] = _addpullheader
112 cmdutil.extraexportmap['pullurl'] = _addpullheader
112
113
113
114
114 def prompt(ui, prompt, default=None, rest=':'):
115 def prompt(ui, prompt, default=None, rest=':'):
115 if default:
116 if default:
116 prompt += ' [%s]' % default
117 prompt += ' [%s]' % default
117 return ui.prompt(prompt + rest, default)
118 return ui.prompt(prompt + rest, default)
118
119
119 def introwanted(ui, opts, number):
120 def introwanted(ui, opts, number):
120 '''is an introductory message apparently wanted?'''
121 '''is an introductory message apparently wanted?'''
121 introconfig = ui.config('patchbomb', 'intro', 'auto')
122 introconfig = ui.config('patchbomb', 'intro', 'auto')
122 if opts.get('intro') or opts.get('desc'):
123 if opts.get('intro') or opts.get('desc'):
123 intro = True
124 intro = True
124 elif introconfig == 'always':
125 elif introconfig == 'always':
125 intro = True
126 intro = True
126 elif introconfig == 'never':
127 elif introconfig == 'never':
127 intro = False
128 intro = False
128 elif introconfig == 'auto':
129 elif introconfig == 'auto':
129 intro = 1 < number
130 intro = 1 < number
130 else:
131 else:
131 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
132 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
132 % introconfig)
133 % introconfig)
133 ui.write_err(_('(should be one of always, never, auto)\n'))
134 ui.write_err(_('(should be one of always, never, auto)\n'))
134 intro = 1 < number
135 intro = 1 < number
135 return intro
136 return intro
136
137
137 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
138 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
138 patchname=None):
139 patchname=None):
139
140
140 desc = []
141 desc = []
141 node = None
142 node = None
142 body = ''
143 body = ''
143
144
144 for line in patchlines:
145 for line in patchlines:
145 if line.startswith('#'):
146 if line.startswith('#'):
146 if line.startswith('# Node ID'):
147 if line.startswith('# Node ID'):
147 node = line.split()[-1]
148 node = line.split()[-1]
148 continue
149 continue
149 if line.startswith('diff -r') or line.startswith('diff --git'):
150 if line.startswith('diff -r') or line.startswith('diff --git'):
150 break
151 break
151 desc.append(line)
152 desc.append(line)
152
153
153 if not patchname and not node:
154 if not patchname and not node:
154 raise ValueError
155 raise ValueError
155
156
156 if opts.get('attach') and not opts.get('body'):
157 if opts.get('attach') and not opts.get('body'):
157 body = ('\n'.join(desc[1:]).strip() or
158 body = ('\n'.join(desc[1:]).strip() or
158 'Patch subject is complete summary.')
159 'Patch subject is complete summary.')
159 body += '\n\n\n'
160 body += '\n\n\n'
160
161
161 if opts.get('plain'):
162 if opts.get('plain'):
162 while patchlines and patchlines[0].startswith('# '):
163 while patchlines and patchlines[0].startswith('# '):
163 patchlines.pop(0)
164 patchlines.pop(0)
164 if patchlines:
165 if patchlines:
165 patchlines.pop(0)
166 patchlines.pop(0)
166 while patchlines and not patchlines[0].strip():
167 while patchlines and not patchlines[0].strip():
167 patchlines.pop(0)
168 patchlines.pop(0)
168
169
169 ds = patch.diffstat(patchlines)
170 ds = patch.diffstat(patchlines)
170 if opts.get('diffstat'):
171 if opts.get('diffstat'):
171 body += ds + '\n\n'
172 body += ds + '\n\n'
172
173
173 addattachment = opts.get('attach') or opts.get('inline')
174 addattachment = opts.get('attach') or opts.get('inline')
174 if not addattachment or opts.get('body'):
175 if not addattachment or opts.get('body'):
175 body += '\n'.join(patchlines)
176 body += '\n'.join(patchlines)
176
177
177 if addattachment:
178 if addattachment:
178 msg = emailmod.MIMEMultipart.MIMEMultipart()
179 msg = emailmod.MIMEMultipart.MIMEMultipart()
179 if body:
180 if body:
180 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
181 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
181 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
182 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
182 opts.get('test'))
183 opts.get('test'))
183 binnode = nodemod.bin(node)
184 binnode = nodemod.bin(node)
184 # if node is mq patch, it will have the patch file's name as a tag
185 # if node is mq patch, it will have the patch file's name as a tag
185 if not patchname:
186 if not patchname:
186 patchtags = [t for t in repo.nodetags(binnode)
187 patchtags = [t for t in repo.nodetags(binnode)
187 if t.endswith('.patch') or t.endswith('.diff')]
188 if t.endswith('.patch') or t.endswith('.diff')]
188 if patchtags:
189 if patchtags:
189 patchname = patchtags[0]
190 patchname = patchtags[0]
190 elif total > 1:
191 elif total > 1:
191 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
192 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
192 binnode, seqno=idx,
193 binnode, seqno=idx,
193 total=total)
194 total=total)
194 else:
195 else:
195 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
196 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
196 disposition = 'inline'
197 disposition = 'inline'
197 if opts.get('attach'):
198 if opts.get('attach'):
198 disposition = 'attachment'
199 disposition = 'attachment'
199 p['Content-Disposition'] = disposition + '; filename=' + patchname
200 p['Content-Disposition'] = disposition + '; filename=' + patchname
200 msg.attach(p)
201 msg.attach(p)
201 else:
202 else:
202 msg = mail.mimetextpatch(body, display=opts.get('test'))
203 msg = mail.mimetextpatch(body, display=opts.get('test'))
203
204
204 flag = ' '.join(opts.get('flag'))
205 flag = ' '.join(opts.get('flag'))
205 if flag:
206 if flag:
206 flag = ' ' + flag
207 flag = ' ' + flag
207
208
208 subj = desc[0].strip().rstrip('. ')
209 subj = desc[0].strip().rstrip('. ')
209 if not numbered:
210 if not numbered:
210 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
211 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
211 else:
212 else:
212 tlen = len(str(total))
213 tlen = len(str(total))
213 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
214 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
214 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
215 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
215 msg['X-Mercurial-Node'] = node
216 msg['X-Mercurial-Node'] = node
216 msg['X-Mercurial-Series-Index'] = '%i' % idx
217 msg['X-Mercurial-Series-Index'] = '%i' % idx
217 msg['X-Mercurial-Series-Total'] = '%i' % total
218 msg['X-Mercurial-Series-Total'] = '%i' % total
218 return msg, subj, ds
219 return msg, subj, ds
219
220
220 def _getpatches(repo, revs, **opts):
221 def _getpatches(repo, revs, **opts):
221 """return a list of patches for a list of revisions
222 """return a list of patches for a list of revisions
222
223
223 Each patch in the list is itself a list of lines.
224 Each patch in the list is itself a list of lines.
224 """
225 """
225 ui = repo.ui
226 ui = repo.ui
226 prev = repo['.'].rev()
227 prev = repo['.'].rev()
227 for r in revs:
228 for r in revs:
228 if r == prev and (repo[None].files() or repo[None].deleted()):
229 if r == prev and (repo[None].files() or repo[None].deleted()):
229 ui.warn(_('warning: working directory has '
230 ui.warn(_('warning: working directory has '
230 'uncommitted changes\n'))
231 'uncommitted changes\n'))
231 output = stringio()
232 output = stringio()
232 cmdutil.export(repo, [r], fp=output,
233 cmdutil.export(repo, [r], fp=output,
233 opts=patch.difffeatureopts(ui, opts, git=True))
234 opts=patch.difffeatureopts(ui, opts, git=True))
234 yield output.getvalue().split('\n')
235 yield output.getvalue().split('\n')
235 def _getbundle(repo, dest, **opts):
236 def _getbundle(repo, dest, **opts):
236 """return a bundle containing changesets missing in "dest"
237 """return a bundle containing changesets missing in "dest"
237
238
238 The `opts` keyword-arguments are the same as the one accepted by the
239 The `opts` keyword-arguments are the same as the one accepted by the
239 `bundle` command.
240 `bundle` command.
240
241
241 The bundle is a returned as a single in-memory binary blob.
242 The bundle is a returned as a single in-memory binary blob.
242 """
243 """
243 ui = repo.ui
244 ui = repo.ui
244 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
245 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
245 tmpfn = os.path.join(tmpdir, 'bundle')
246 tmpfn = os.path.join(tmpdir, 'bundle')
246 btype = ui.config('patchbomb', 'bundletype')
247 btype = ui.config('patchbomb', 'bundletype')
247 if btype:
248 if btype:
248 opts['type'] = btype
249 opts['type'] = btype
249 try:
250 try:
250 commands.bundle(ui, repo, tmpfn, dest, **opts)
251 commands.bundle(ui, repo, tmpfn, dest, **opts)
251 return util.readfile(tmpfn)
252 return util.readfile(tmpfn)
252 finally:
253 finally:
253 try:
254 try:
254 os.unlink(tmpfn)
255 os.unlink(tmpfn)
255 except OSError:
256 except OSError:
256 pass
257 pass
257 os.rmdir(tmpdir)
258 os.rmdir(tmpdir)
258
259
259 def _getdescription(repo, defaultbody, sender, **opts):
260 def _getdescription(repo, defaultbody, sender, **opts):
260 """obtain the body of the introduction message and return it
261 """obtain the body of the introduction message and return it
261
262
262 This is also used for the body of email with an attached bundle.
263 This is also used for the body of email with an attached bundle.
263
264
264 The body can be obtained either from the command line option or entered by
265 The body can be obtained either from the command line option or entered by
265 the user through the editor.
266 the user through the editor.
266 """
267 """
267 ui = repo.ui
268 ui = repo.ui
268 if opts.get('desc'):
269 if opts.get('desc'):
269 body = open(opts.get('desc')).read()
270 body = open(opts.get('desc')).read()
270 else:
271 else:
271 ui.write(_('\nWrite the introductory message for the '
272 ui.write(_('\nWrite the introductory message for the '
272 'patch series.\n\n'))
273 'patch series.\n\n'))
273 body = ui.edit(defaultbody, sender)
274 body = ui.edit(defaultbody, sender)
274 # Save series description in case sendmail fails
275 # Save series description in case sendmail fails
275 msgfile = repo.vfs('last-email.txt', 'wb')
276 msgfile = repo.vfs('last-email.txt', 'wb')
276 msgfile.write(body)
277 msgfile.write(body)
277 msgfile.close()
278 msgfile.close()
278 return body
279 return body
279
280
280 def _getbundlemsgs(repo, sender, bundle, **opts):
281 def _getbundlemsgs(repo, sender, bundle, **opts):
281 """Get the full email for sending a given bundle
282 """Get the full email for sending a given bundle
282
283
283 This function returns a list of "email" tuples (subject, content, None).
284 This function returns a list of "email" tuples (subject, content, None).
284 The list is always one message long in that case.
285 The list is always one message long in that case.
285 """
286 """
286 ui = repo.ui
287 ui = repo.ui
287 _charsets = mail._charsets(ui)
288 _charsets = mail._charsets(ui)
288 subj = (opts.get('subject')
289 subj = (opts.get('subject')
289 or prompt(ui, 'Subject:', 'A bundle for your repository'))
290 or prompt(ui, 'Subject:', 'A bundle for your repository'))
290
291
291 body = _getdescription(repo, '', sender, **opts)
292 body = _getdescription(repo, '', sender, **opts)
292 msg = emailmod.MIMEMultipart.MIMEMultipart()
293 msg = emailmod.MIMEMultipart.MIMEMultipart()
293 if body:
294 if body:
294 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
295 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
295 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
296 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
296 datapart.set_payload(bundle)
297 datapart.set_payload(bundle)
297 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
298 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
298 datapart.add_header('Content-Disposition', 'attachment',
299 datapart.add_header('Content-Disposition', 'attachment',
299 filename=bundlename)
300 filename=bundlename)
300 emailmod.Encoders.encode_base64(datapart)
301 emailmod.Encoders.encode_base64(datapart)
301 msg.attach(datapart)
302 msg.attach(datapart)
302 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
303 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
303 return [(msg, subj, None)]
304 return [(msg, subj, None)]
304
305
305 def _makeintro(repo, sender, patches, **opts):
306 def _makeintro(repo, sender, patches, **opts):
306 """make an introduction email, asking the user for content if needed
307 """make an introduction email, asking the user for content if needed
307
308
308 email is returned as (subject, body, cumulative-diffstat)"""
309 email is returned as (subject, body, cumulative-diffstat)"""
309 ui = repo.ui
310 ui = repo.ui
310 _charsets = mail._charsets(ui)
311 _charsets = mail._charsets(ui)
311 tlen = len(str(len(patches)))
312 tlen = len(str(len(patches)))
312
313
313 flag = opts.get('flag') or ''
314 flag = opts.get('flag') or ''
314 if flag:
315 if flag:
315 flag = ' ' + ' '.join(flag)
316 flag = ' ' + ' '.join(flag)
316 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
317 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
317
318
318 subj = (opts.get('subject') or
319 subj = (opts.get('subject') or
319 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
320 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
320 if not subj:
321 if not subj:
321 return None # skip intro if the user doesn't bother
322 return None # skip intro if the user doesn't bother
322
323
323 subj = prefix + ' ' + subj
324 subj = prefix + ' ' + subj
324
325
325 body = ''
326 body = ''
326 if opts.get('diffstat'):
327 if opts.get('diffstat'):
327 # generate a cumulative diffstat of the whole patch series
328 # generate a cumulative diffstat of the whole patch series
328 diffstat = patch.diffstat(sum(patches, []))
329 diffstat = patch.diffstat(sum(patches, []))
329 body = '\n' + diffstat
330 body = '\n' + diffstat
330 else:
331 else:
331 diffstat = None
332 diffstat = None
332
333
333 body = _getdescription(repo, body, sender, **opts)
334 body = _getdescription(repo, body, sender, **opts)
334 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
335 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
335 msg['Subject'] = mail.headencode(ui, subj, _charsets,
336 msg['Subject'] = mail.headencode(ui, subj, _charsets,
336 opts.get('test'))
337 opts.get('test'))
337 return (msg, subj, diffstat)
338 return (msg, subj, diffstat)
338
339
339 def _getpatchmsgs(repo, sender, patches, patchnames=None, **opts):
340 def _getpatchmsgs(repo, sender, patches, patchnames=None, **opts):
340 """return a list of emails from a list of patches
341 """return a list of emails from a list of patches
341
342
342 This involves introduction message creation if necessary.
343 This involves introduction message creation if necessary.
343
344
344 This function returns a list of "email" tuples (subject, content, None).
345 This function returns a list of "email" tuples (subject, content, None).
345 """
346 """
346 ui = repo.ui
347 ui = repo.ui
347 _charsets = mail._charsets(ui)
348 _charsets = mail._charsets(ui)
348 msgs = []
349 msgs = []
349
350
350 ui.write(_('this patch series consists of %d patches.\n\n')
351 ui.write(_('this patch series consists of %d patches.\n\n')
351 % len(patches))
352 % len(patches))
352
353
353 # build the intro message, or skip it if the user declines
354 # build the intro message, or skip it if the user declines
354 if introwanted(ui, opts, len(patches)):
355 if introwanted(ui, opts, len(patches)):
355 msg = _makeintro(repo, sender, patches, **opts)
356 msg = _makeintro(repo, sender, patches, **opts)
356 if msg:
357 if msg:
357 msgs.append(msg)
358 msgs.append(msg)
358
359
359 # are we going to send more than one message?
360 # are we going to send more than one message?
360 numbered = len(msgs) + len(patches) > 1
361 numbered = len(msgs) + len(patches) > 1
361
362
362 # now generate the actual patch messages
363 # now generate the actual patch messages
363 name = None
364 name = None
364 for i, p in enumerate(patches):
365 for i, p in enumerate(patches):
365 if patchnames:
366 if patchnames:
366 name = patchnames[i]
367 name = patchnames[i]
367 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
368 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
368 len(patches), numbered, name)
369 len(patches), numbered, name)
369 msgs.append(msg)
370 msgs.append(msg)
370
371
371 return msgs
372 return msgs
372
373
373 def _getoutgoing(repo, dest, revs):
374 def _getoutgoing(repo, dest, revs):
374 '''Return the revisions present locally but not in dest'''
375 '''Return the revisions present locally but not in dest'''
375 ui = repo.ui
376 ui = repo.ui
376 url = ui.expandpath(dest or 'default-push', dest or 'default')
377 url = ui.expandpath(dest or 'default-push', dest or 'default')
377 url = hg.parseurl(url)[0]
378 url = hg.parseurl(url)[0]
378 ui.status(_('comparing with %s\n') % util.hidepassword(url))
379 ui.status(_('comparing with %s\n') % util.hidepassword(url))
379
380
380 revs = [r for r in revs if r >= 0]
381 revs = [r for r in revs if r >= 0]
381 if not revs:
382 if not revs:
382 revs = [len(repo) - 1]
383 revs = [len(repo) - 1]
383 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
384 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
384 if not revs:
385 if not revs:
385 ui.status(_("no changes found\n"))
386 ui.status(_("no changes found\n"))
386 return revs
387 return revs
387
388
388 emailopts = [
389 emailopts = [
389 ('', 'body', None, _('send patches as inline message text (default)')),
390 ('', 'body', None, _('send patches as inline message text (default)')),
390 ('a', 'attach', None, _('send patches as attachments')),
391 ('a', 'attach', None, _('send patches as attachments')),
391 ('i', 'inline', None, _('send patches as inline attachments')),
392 ('i', 'inline', None, _('send patches as inline attachments')),
392 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
393 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
393 ('c', 'cc', [], _('email addresses of copy recipients')),
394 ('c', 'cc', [], _('email addresses of copy recipients')),
394 ('', 'confirm', None, _('ask for confirmation before sending')),
395 ('', 'confirm', None, _('ask for confirmation before sending')),
395 ('d', 'diffstat', None, _('add diffstat output to messages')),
396 ('d', 'diffstat', None, _('add diffstat output to messages')),
396 ('', 'date', '', _('use the given date as the sending date')),
397 ('', 'date', '', _('use the given date as the sending date')),
397 ('', 'desc', '', _('use the given file as the series description')),
398 ('', 'desc', '', _('use the given file as the series description')),
398 ('f', 'from', '', _('email address of sender')),
399 ('f', 'from', '', _('email address of sender')),
399 ('n', 'test', None, _('print messages that would be sent')),
400 ('n', 'test', None, _('print messages that would be sent')),
400 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
401 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
401 ('', 'reply-to', [], _('email addresses replies should be sent to')),
402 ('', 'reply-to', [], _('email addresses replies should be sent to')),
402 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
403 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
403 ('', 'in-reply-to', '', _('message identifier to reply to')),
404 ('', 'in-reply-to', '', _('message identifier to reply to')),
404 ('', 'flag', [], _('flags to add in subject prefixes')),
405 ('', 'flag', [], _('flags to add in subject prefixes')),
405 ('t', 'to', [], _('email addresses of recipients'))]
406 ('t', 'to', [], _('email addresses of recipients'))]
406
407
407 @command('email',
408 @command('email',
408 [('g', 'git', None, _('use git extended diff format')),
409 [('g', 'git', None, _('use git extended diff format')),
409 ('', 'plain', None, _('omit hg patch header')),
410 ('', 'plain', None, _('omit hg patch header')),
410 ('o', 'outgoing', None,
411 ('o', 'outgoing', None,
411 _('send changes not found in the target repository')),
412 _('send changes not found in the target repository')),
412 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
413 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
413 ('', 'bundlename', 'bundle',
414 ('', 'bundlename', 'bundle',
414 _('name of the bundle attachment file'), _('NAME')),
415 _('name of the bundle attachment file'), _('NAME')),
415 ('r', 'rev', [], _('a revision to send'), _('REV')),
416 ('r', 'rev', [], _('a revision to send'), _('REV')),
416 ('', 'force', None, _('run even when remote repository is unrelated '
417 ('', 'force', None, _('run even when remote repository is unrelated '
417 '(with -b/--bundle)')),
418 '(with -b/--bundle)')),
418 ('', 'base', [], _('a base changeset to specify instead of a destination '
419 ('', 'base', [], _('a base changeset to specify instead of a destination '
419 '(with -b/--bundle)'), _('REV')),
420 '(with -b/--bundle)'), _('REV')),
420 ('', 'intro', None, _('send an introduction email for a single patch')),
421 ('', 'intro', None, _('send an introduction email for a single patch')),
421 ] + emailopts + commands.remoteopts,
422 ] + emailopts + commands.remoteopts,
422 _('hg email [OPTION]... [DEST]...'))
423 _('hg email [OPTION]... [DEST]...'))
423 def email(ui, repo, *revs, **opts):
424 def email(ui, repo, *revs, **opts):
424 '''send changesets by email
425 '''send changesets by email
425
426
426 By default, diffs are sent in the format generated by
427 By default, diffs are sent in the format generated by
427 :hg:`export`, one per message. The series starts with a "[PATCH 0
428 :hg:`export`, one per message. The series starts with a "[PATCH 0
428 of N]" introduction, which describes the series as a whole.
429 of N]" introduction, which describes the series as a whole.
429
430
430 Each patch email has a Subject line of "[PATCH M of N] ...", using
431 Each patch email has a Subject line of "[PATCH M of N] ...", using
431 the first line of the changeset description as the subject text.
432 the first line of the changeset description as the subject text.
432 The message contains two or three parts. First, the changeset
433 The message contains two or three parts. First, the changeset
433 description.
434 description.
434
435
435 With the -d/--diffstat option, if the diffstat program is
436 With the -d/--diffstat option, if the diffstat program is
436 installed, the result of running diffstat on the patch is inserted.
437 installed, the result of running diffstat on the patch is inserted.
437
438
438 Finally, the patch itself, as generated by :hg:`export`.
439 Finally, the patch itself, as generated by :hg:`export`.
439
440
440 With the -d/--diffstat or --confirm options, you will be presented
441 With the -d/--diffstat or --confirm options, you will be presented
441 with a final summary of all messages and asked for confirmation before
442 with a final summary of all messages and asked for confirmation before
442 the messages are sent.
443 the messages are sent.
443
444
444 By default the patch is included as text in the email body for
445 By default the patch is included as text in the email body for
445 easy reviewing. Using the -a/--attach option will instead create
446 easy reviewing. Using the -a/--attach option will instead create
446 an attachment for the patch. With -i/--inline an inline attachment
447 an attachment for the patch. With -i/--inline an inline attachment
447 will be created. You can include a patch both as text in the email
448 will be created. You can include a patch both as text in the email
448 body and as a regular or an inline attachment by combining the
449 body and as a regular or an inline attachment by combining the
449 -a/--attach or -i/--inline with the --body option.
450 -a/--attach or -i/--inline with the --body option.
450
451
451 With -o/--outgoing, emails will be generated for patches not found
452 With -o/--outgoing, emails will be generated for patches not found
452 in the destination repository (or only those which are ancestors
453 in the destination repository (or only those which are ancestors
453 of the specified revisions if any are provided)
454 of the specified revisions if any are provided)
454
455
455 With -b/--bundle, changesets are selected as for --outgoing, but a
456 With -b/--bundle, changesets are selected as for --outgoing, but a
456 single email containing a binary Mercurial bundle as an attachment
457 single email containing a binary Mercurial bundle as an attachment
457 will be sent. Use the ``patchbomb.bundletype`` config option to
458 will be sent. Use the ``patchbomb.bundletype`` config option to
458 control the bundle type as with :hg:`bundle --type`.
459 control the bundle type as with :hg:`bundle --type`.
459
460
460 With -m/--mbox, instead of previewing each patchbomb message in a
461 With -m/--mbox, instead of previewing each patchbomb message in a
461 pager or sending the messages directly, it will create a UNIX
462 pager or sending the messages directly, it will create a UNIX
462 mailbox file with the patch emails. This mailbox file can be
463 mailbox file with the patch emails. This mailbox file can be
463 previewed with any mail user agent which supports UNIX mbox
464 previewed with any mail user agent which supports UNIX mbox
464 files.
465 files.
465
466
466 With -n/--test, all steps will run, but mail will not be sent.
467 With -n/--test, all steps will run, but mail will not be sent.
467 You will be prompted for an email recipient address, a subject and
468 You will be prompted for an email recipient address, a subject and
468 an introductory message describing the patches of your patchbomb.
469 an introductory message describing the patches of your patchbomb.
469 Then when all is done, patchbomb messages are displayed. If the
470 Then when all is done, patchbomb messages are displayed. If the
470 PAGER environment variable is set, your pager will be fired up once
471 PAGER environment variable is set, your pager will be fired up once
471 for each patchbomb message, so you can verify everything is alright.
472 for each patchbomb message, so you can verify everything is alright.
472
473
473 In case email sending fails, you will find a backup of your series
474 In case email sending fails, you will find a backup of your series
474 introductory message in ``.hg/last-email.txt``.
475 introductory message in ``.hg/last-email.txt``.
475
476
476 The default behavior of this command can be customized through
477 The default behavior of this command can be customized through
477 configuration. (See :hg:`help patchbomb` for details)
478 configuration. (See :hg:`help patchbomb` for details)
478
479
479 Examples::
480 Examples::
480
481
481 hg email -r 3000 # send patch 3000 only
482 hg email -r 3000 # send patch 3000 only
482 hg email -r 3000 -r 3001 # send patches 3000 and 3001
483 hg email -r 3000 -r 3001 # send patches 3000 and 3001
483 hg email -r 3000:3005 # send patches 3000 through 3005
484 hg email -r 3000:3005 # send patches 3000 through 3005
484 hg email 3000 # send patch 3000 (deprecated)
485 hg email 3000 # send patch 3000 (deprecated)
485
486
486 hg email -o # send all patches not in default
487 hg email -o # send all patches not in default
487 hg email -o DEST # send all patches not in DEST
488 hg email -o DEST # send all patches not in DEST
488 hg email -o -r 3000 # send all ancestors of 3000 not in default
489 hg email -o -r 3000 # send all ancestors of 3000 not in default
489 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
490 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
490
491
491 hg email -b # send bundle of all patches not in default
492 hg email -b # send bundle of all patches not in default
492 hg email -b DEST # send bundle of all patches not in DEST
493 hg email -b DEST # send bundle of all patches not in DEST
493 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
494 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
494 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
495 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
495
496
496 hg email -o -m mbox && # generate an mbox file...
497 hg email -o -m mbox && # generate an mbox file...
497 mutt -R -f mbox # ... and view it with mutt
498 mutt -R -f mbox # ... and view it with mutt
498 hg email -o -m mbox && # generate an mbox file ...
499 hg email -o -m mbox && # generate an mbox file ...
499 formail -s sendmail \\ # ... and use formail to send from the mbox
500 formail -s sendmail \\ # ... and use formail to send from the mbox
500 -bm -t < mbox # ... using sendmail
501 -bm -t < mbox # ... using sendmail
501
502
502 Before using this command, you will need to enable email in your
503 Before using this command, you will need to enable email in your
503 hgrc. See the [email] section in hgrc(5) for details.
504 hgrc. See the [email] section in hgrc(5) for details.
504 '''
505 '''
505
506
506 _charsets = mail._charsets(ui)
507 _charsets = mail._charsets(ui)
507
508
508 bundle = opts.get('bundle')
509 bundle = opts.get('bundle')
509 date = opts.get('date')
510 date = opts.get('date')
510 mbox = opts.get('mbox')
511 mbox = opts.get('mbox')
511 outgoing = opts.get('outgoing')
512 outgoing = opts.get('outgoing')
512 rev = opts.get('rev')
513 rev = opts.get('rev')
513 # internal option used by pbranches
514 # internal option used by pbranches
514 patches = opts.get('patches')
515 patches = opts.get('patches')
515
516
516 if not (opts.get('test') or mbox):
517 if not (opts.get('test') or mbox):
517 # really sending
518 # really sending
518 mail.validateconfig(ui)
519 mail.validateconfig(ui)
519
520
520 if not (revs or rev or outgoing or bundle or patches):
521 if not (revs or rev or outgoing or bundle or patches):
521 raise error.Abort(_('specify at least one changeset with -r or -o'))
522 raise error.Abort(_('specify at least one changeset with -r or -o'))
522
523
523 if outgoing and bundle:
524 if outgoing and bundle:
524 raise error.Abort(_("--outgoing mode always on with --bundle;"
525 raise error.Abort(_("--outgoing mode always on with --bundle;"
525 " do not re-specify --outgoing"))
526 " do not re-specify --outgoing"))
526
527
527 if outgoing or bundle:
528 if outgoing or bundle:
528 if len(revs) > 1:
529 if len(revs) > 1:
529 raise error.Abort(_("too many destinations"))
530 raise error.Abort(_("too many destinations"))
530 if revs:
531 if revs:
531 dest = revs[0]
532 dest = revs[0]
532 else:
533 else:
533 dest = None
534 dest = None
534 revs = []
535 revs = []
535
536
536 if rev:
537 if rev:
537 if revs:
538 if revs:
538 raise error.Abort(_('use only one form to specify the revision'))
539 raise error.Abort(_('use only one form to specify the revision'))
539 revs = rev
540 revs = rev
540
541
541 revs = scmutil.revrange(repo, revs)
542 revs = scmutil.revrange(repo, revs)
542 if outgoing:
543 if outgoing:
543 revs = _getoutgoing(repo, dest, revs)
544 revs = _getoutgoing(repo, dest, revs)
544 if bundle:
545 if bundle:
545 opts['revs'] = [str(r) for r in revs]
546 opts['revs'] = [str(r) for r in revs]
546
547
547 # check if revision exist on the public destination
548 # check if revision exist on the public destination
548 publicurl = repo.ui.config('patchbomb', 'publicurl')
549 publicurl = repo.ui.config('patchbomb', 'publicurl')
549 if publicurl is not None:
550 if publicurl is not None:
550 repo.ui.debug('checking that revision exist in the public repo')
551 repo.ui.debug('checking that revision exist in the public repo')
551 try:
552 try:
552 publicpeer = hg.peer(repo, {}, publicurl)
553 publicpeer = hg.peer(repo, {}, publicurl)
553 except error.RepoError:
554 except error.RepoError:
554 repo.ui.write_err(_('unable to access public repo: %s\n')
555 repo.ui.write_err(_('unable to access public repo: %s\n')
555 % publicurl)
556 % publicurl)
556 raise
557 raise
557 if not publicpeer.capable('known'):
558 if not publicpeer.capable('known'):
558 repo.ui.debug('skipping existence checks: public repo too old')
559 repo.ui.debug('skipping existence checks: public repo too old')
559 else:
560 else:
560 out = [repo[r] for r in revs]
561 out = [repo[r] for r in revs]
561 known = publicpeer.known(h.node() for h in out)
562 known = publicpeer.known(h.node() for h in out)
562 missing = []
563 missing = []
563 for idx, h in enumerate(out):
564 for idx, h in enumerate(out):
564 if not known[idx]:
565 if not known[idx]:
565 missing.append(h)
566 missing.append(h)
566 if missing:
567 if missing:
567 if 1 < len(missing):
568 if 1 < len(missing):
568 msg = _('public "%s" is missing %s and %i others')
569 msg = _('public "%s" is missing %s and %i others')
569 msg %= (publicurl, missing[0], len(missing) - 1)
570 msg %= (publicurl, missing[0], len(missing) - 1)
570 else:
571 else:
571 msg = _('public url %s is missing %s')
572 msg = _('public url %s is missing %s')
572 msg %= (publicurl, missing[0])
573 msg %= (publicurl, missing[0])
573 revhint = ' '.join('-r %s' % h
574 revhint = ' '.join('-r %s' % h
574 for h in repo.set('heads(%ld)', missing))
575 for h in repo.set('heads(%ld)', missing))
575 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
576 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
576 raise error.Abort(msg, hint=hint)
577 raise error.Abort(msg, hint=hint)
577
578
578 # start
579 # start
579 if date:
580 if date:
580 start_time = util.parsedate(date)
581 start_time = util.parsedate(date)
581 else:
582 else:
582 start_time = util.makedate()
583 start_time = util.makedate()
583
584
584 def genmsgid(id):
585 def genmsgid(id):
585 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
586 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
586
587
587 # deprecated config: patchbomb.from
588 # deprecated config: patchbomb.from
588 sender = (opts.get('from') or ui.config('email', 'from') or
589 sender = (opts.get('from') or ui.config('email', 'from') or
589 ui.config('patchbomb', 'from') or
590 ui.config('patchbomb', 'from') or
590 prompt(ui, 'From', ui.username()))
591 prompt(ui, 'From', ui.username()))
591
592
592 if patches:
593 if patches:
593 msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
594 msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
594 **opts)
595 **opts)
595 elif bundle:
596 elif bundle:
596 bundledata = _getbundle(repo, dest, **opts)
597 bundledata = _getbundle(repo, dest, **opts)
597 bundleopts = opts.copy()
598 bundleopts = opts.copy()
598 bundleopts.pop('bundle', None) # already processed
599 bundleopts.pop('bundle', None) # already processed
599 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
600 msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
600 else:
601 else:
601 _patches = list(_getpatches(repo, revs, **opts))
602 _patches = list(_getpatches(repo, revs, **opts))
602 msgs = _getpatchmsgs(repo, sender, _patches, **opts)
603 msgs = _getpatchmsgs(repo, sender, _patches, **opts)
603
604
604 showaddrs = []
605 showaddrs = []
605
606
606 def getaddrs(header, ask=False, default=None):
607 def getaddrs(header, ask=False, default=None):
607 configkey = header.lower()
608 configkey = header.lower()
608 opt = header.replace('-', '_').lower()
609 opt = header.replace('-', '_').lower()
609 addrs = opts.get(opt)
610 addrs = opts.get(opt)
610 if addrs:
611 if addrs:
611 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
612 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
612 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
613 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
613
614
614 # not on the command line: fallback to config and then maybe ask
615 # not on the command line: fallback to config and then maybe ask
615 addr = (ui.config('email', configkey) or
616 addr = (ui.config('email', configkey) or
616 ui.config('patchbomb', configkey))
617 ui.config('patchbomb', configkey))
617 if not addr:
618 if not addr:
618 specified = (ui.hasconfig('email', configkey) or
619 specified = (ui.hasconfig('email', configkey) or
619 ui.hasconfig('patchbomb', configkey))
620 ui.hasconfig('patchbomb', configkey))
620 if not specified and ask:
621 if not specified and ask:
621 addr = prompt(ui, header, default=default)
622 addr = prompt(ui, header, default=default)
622 if addr:
623 if addr:
623 showaddrs.append('%s: %s' % (header, addr))
624 showaddrs.append('%s: %s' % (header, addr))
624 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
625 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
625 else:
626 else:
626 return default
627 return default
627
628
628 to = getaddrs('To', ask=True)
629 to = getaddrs('To', ask=True)
629 if not to:
630 if not to:
630 # we can get here in non-interactive mode
631 # we can get here in non-interactive mode
631 raise error.Abort(_('no recipient addresses provided'))
632 raise error.Abort(_('no recipient addresses provided'))
632 cc = getaddrs('Cc', ask=True, default='') or []
633 cc = getaddrs('Cc', ask=True, default='') or []
633 bcc = getaddrs('Bcc') or []
634 bcc = getaddrs('Bcc') or []
634 replyto = getaddrs('Reply-To')
635 replyto = getaddrs('Reply-To')
635
636
636 confirm = ui.configbool('patchbomb', 'confirm')
637 confirm = ui.configbool('patchbomb', 'confirm')
637 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
638 confirm |= bool(opts.get('diffstat') or opts.get('confirm'))
638
639
639 if confirm:
640 if confirm:
640 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
641 ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
641 ui.write(('From: %s\n' % sender), label='patchbomb.from')
642 ui.write(('From: %s\n' % sender), label='patchbomb.from')
642 for addr in showaddrs:
643 for addr in showaddrs:
643 ui.write('%s\n' % addr, label='patchbomb.to')
644 ui.write('%s\n' % addr, label='patchbomb.to')
644 for m, subj, ds in msgs:
645 for m, subj, ds in msgs:
645 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
646 ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
646 if ds:
647 if ds:
647 ui.write(ds, label='patchbomb.diffstats')
648 ui.write(ds, label='patchbomb.diffstats')
648 ui.write('\n')
649 ui.write('\n')
649 if ui.promptchoice(_('are you sure you want to send (yn)?'
650 if ui.promptchoice(_('are you sure you want to send (yn)?'
650 '$$ &Yes $$ &No')):
651 '$$ &Yes $$ &No')):
651 raise error.Abort(_('patchbomb canceled'))
652 raise error.Abort(_('patchbomb canceled'))
652
653
653 ui.write('\n')
654 ui.write('\n')
654
655
655 parent = opts.get('in_reply_to') or None
656 parent = opts.get('in_reply_to') or None
656 # angle brackets may be omitted; they're not semantically part of the msg-id
657 # angle brackets may be omitted; they're not semantically part of the msg-id
657 if parent is not None:
658 if parent is not None:
658 if not parent.startswith('<'):
659 if not parent.startswith('<'):
659 parent = '<' + parent
660 parent = '<' + parent
660 if not parent.endswith('>'):
661 if not parent.endswith('>'):
661 parent += '>'
662 parent += '>'
662
663
663 sender_addr = emailmod.Utils.parseaddr(sender)[1]
664 sender_addr = emailmod.Utils.parseaddr(sender)[1]
664 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
665 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
665 sendmail = None
666 sendmail = None
666 firstpatch = None
667 firstpatch = None
667 for i, (m, subj, ds) in enumerate(msgs):
668 for i, (m, subj, ds) in enumerate(msgs):
668 try:
669 try:
669 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
670 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
670 if not firstpatch:
671 if not firstpatch:
671 firstpatch = m['Message-Id']
672 firstpatch = m['Message-Id']
672 m['X-Mercurial-Series-Id'] = firstpatch
673 m['X-Mercurial-Series-Id'] = firstpatch
673 except TypeError:
674 except TypeError:
674 m['Message-Id'] = genmsgid('patchbomb')
675 m['Message-Id'] = genmsgid('patchbomb')
675 if parent:
676 if parent:
676 m['In-Reply-To'] = parent
677 m['In-Reply-To'] = parent
677 m['References'] = parent
678 m['References'] = parent
678 if not parent or 'X-Mercurial-Node' not in m:
679 if not parent or 'X-Mercurial-Node' not in m:
679 parent = m['Message-Id']
680 parent = m['Message-Id']
680
681
681 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
682 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
682 m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)
683 m['Date'] = emailmod.Utils.formatdate(start_time[0], localtime=True)
683
684
684 start_time = (start_time[0] + 1, start_time[1])
685 start_time = (start_time[0] + 1, start_time[1])
685 m['From'] = sender
686 m['From'] = sender
686 m['To'] = ', '.join(to)
687 m['To'] = ', '.join(to)
687 if cc:
688 if cc:
688 m['Cc'] = ', '.join(cc)
689 m['Cc'] = ', '.join(cc)
689 if bcc:
690 if bcc:
690 m['Bcc'] = ', '.join(bcc)
691 m['Bcc'] = ', '.join(bcc)
691 if replyto:
692 if replyto:
692 m['Reply-To'] = ', '.join(replyto)
693 m['Reply-To'] = ', '.join(replyto)
693 if opts.get('test'):
694 if opts.get('test'):
694 ui.status(_('displaying '), subj, ' ...\n')
695 ui.status(_('displaying '), subj, ' ...\n')
695 ui.flush()
696 ui.flush()
696 if 'PAGER' in os.environ and not ui.plain():
697 if 'PAGER' in encoding.environ and not ui.plain():
697 fp = util.popen(os.environ['PAGER'], 'w')
698 fp = util.popen(encoding.environ['PAGER'], 'w')
698 else:
699 else:
699 fp = ui
700 fp = ui
700 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
701 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
701 try:
702 try:
702 generator.flatten(m, 0)
703 generator.flatten(m, 0)
703 fp.write('\n')
704 fp.write('\n')
704 except IOError as inst:
705 except IOError as inst:
705 if inst.errno != errno.EPIPE:
706 if inst.errno != errno.EPIPE:
706 raise
707 raise
707 if fp is not ui:
708 if fp is not ui:
708 fp.close()
709 fp.close()
709 else:
710 else:
710 if not sendmail:
711 if not sendmail:
711 sendmail = mail.connect(ui, mbox=mbox)
712 sendmail = mail.connect(ui, mbox=mbox)
712 ui.status(_('sending '), subj, ' ...\n')
713 ui.status(_('sending '), subj, ' ...\n')
713 ui.progress(_('sending'), i, item=subj, total=len(msgs),
714 ui.progress(_('sending'), i, item=subj, total=len(msgs),
714 unit=_('emails'))
715 unit=_('emails'))
715 if not mbox:
716 if not mbox:
716 # Exim does not remove the Bcc field
717 # Exim does not remove the Bcc field
717 del m['Bcc']
718 del m['Bcc']
718 fp = stringio()
719 fp = stringio()
719 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
720 generator = emailmod.Generator.Generator(fp, mangle_from_=False)
720 generator.flatten(m, 0)
721 generator.flatten(m, 0)
721 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
722 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
722
723
723 ui.progress(_('writing'), None)
724 ui.progress(_('writing'), None)
724 ui.progress(_('sending'), None)
725 ui.progress(_('sending'), None)
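The only functional change in the hunk above is the lookup of $PAGER in --test mode, which now goes through encoding.environ instead of os.environ so the environment is read consistently as bytes. A minimal sketch of the resulting code path, assuming a ui object and the mercurial modules used above are in scope::

    from mercurial import encoding, util

    pager = encoding.environ.get('PAGER')
    if pager and not ui.plain():
        fp = util.popen(pager, 'w')    # pipe the rendered message through $PAGER
    else:
        fp = ui                        # otherwise write straight to the ui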
@@ -1,638 +1,640 b''
1 # chgserver.py - command server extension for cHg
1 # chgserver.py - command server extension for cHg
2 #
2 #
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """command server extension for cHg
8 """command server extension for cHg
9
9
10 'S' channel (read/write)
10 'S' channel (read/write)
11 propagate ui.system() request to client
11 propagate ui.system() request to client
12
12
13 'attachio' command
13 'attachio' command
14 attach client's stdio passed by sendmsg()
14 attach client's stdio passed by sendmsg()
15
15
16 'chdir' command
16 'chdir' command
17 change current directory
17 change current directory
18
18
19 'getpager' command
19 'getpager' command
20 checks if pager is enabled and which pager should be executed
20 checks if pager is enabled and which pager should be executed
21
21
22 'setenv' command
22 'setenv' command
23 replace os.environ completely
23 replace os.environ completely
24
24
25 'setumask' command
25 'setumask' command
26 set umask
26 set umask
27
27
28 'validate' command
28 'validate' command
29 reload the config and check if the server is up to date
29 reload the config and check if the server is up to date
30
30
31 Config
31 Config
32 ------
32 ------
33
33
34 ::
34 ::
35
35
36 [chgserver]
36 [chgserver]
37 idletimeout = 3600 # seconds, after which an idle server will exit
37 idletimeout = 3600 # seconds, after which an idle server will exit
38 skiphash = False # whether to skip config or env change checks
38 skiphash = False # whether to skip config or env change checks
39 """
39 """
40
40
41 from __future__ import absolute_import
41 from __future__ import absolute_import
42
42
43 import errno
43 import errno
44 import hashlib
44 import hashlib
45 import inspect
45 import inspect
46 import os
46 import os
47 import re
47 import re
48 import signal
48 import signal
49 import struct
49 import struct
50 import sys
50 import sys
51 import time
51 import time
52
52
53 from .i18n import _
53 from .i18n import _
54
54
55 from . import (
55 from . import (
56 cmdutil,
56 cmdutil,
57 commandserver,
57 commandserver,
58 encoding,
58 error,
59 error,
59 extensions,
60 extensions,
60 osutil,
61 osutil,
61 util,
62 util,
62 )
63 )
63
64
64 _log = commandserver.log
65 _log = commandserver.log
65
66
66 def _hashlist(items):
67 def _hashlist(items):
67 """return sha1 hexdigest for a list"""
68 """return sha1 hexdigest for a list"""
68 return hashlib.sha1(str(items)).hexdigest()
69 return hashlib.sha1(str(items)).hexdigest()
69
70
70 # sensitive config sections affecting confighash
71 # sensitive config sections affecting confighash
71 _configsections = [
72 _configsections = [
72 'alias', # affects global state commands.table
73 'alias', # affects global state commands.table
73 'extdiff', # uisetup will register new commands
74 'extdiff', # uisetup will register new commands
74 'extensions',
75 'extensions',
75 ]
76 ]
76
77
77 # sensitive environment variables affecting confighash
78 # sensitive environment variables affecting confighash
78 _envre = re.compile(r'''\A(?:
79 _envre = re.compile(r'''\A(?:
79 CHGHG
80 CHGHG
80 |HG(?:[A-Z].*)?
81 |HG(?:[A-Z].*)?
81 |LANG(?:UAGE)?
82 |LANG(?:UAGE)?
82 |LC_.*
83 |LC_.*
83 |LD_.*
84 |LD_.*
84 |PATH
85 |PATH
85 |PYTHON.*
86 |PYTHON.*
86 |TERM(?:INFO)?
87 |TERM(?:INFO)?
87 |TZ
88 |TZ
88 )\Z''', re.X)
89 )\Z''', re.X)
89
90
90 def _confighash(ui):
91 def _confighash(ui):
91 """return a quick hash for detecting config/env changes
92 """return a quick hash for detecting config/env changes
92
93
93 confighash is the hash of sensitive config items and environment variables.
94 confighash is the hash of sensitive config items and environment variables.
94
95
95 for chgserver, the design is that once confighash changes, the server is
96 for chgserver, the design is that once confighash changes, the server is
96 no longer qualified to serve its client and should redirect the client to a
97 no longer qualified to serve its client and should redirect the client to a
97 new server. unlike mtimehash, a confighash change does not mark the server
98 new server. unlike mtimehash, a confighash change does not mark the server
98 outdated and make it exit, since the user can have different configs at the
99 outdated and make it exit, since the user can have different configs at the
99 same time.
100 same time.
100 """
101 """
101 sectionitems = []
102 sectionitems = []
102 for section in _configsections:
103 for section in _configsections:
103 sectionitems.append(ui.configitems(section))
104 sectionitems.append(ui.configitems(section))
104 sectionhash = _hashlist(sectionitems)
105 sectionhash = _hashlist(sectionitems)
105 envitems = [(k, v) for k, v in os.environ.iteritems() if _envre.match(k)]
106 envitems = [(k, v) for k, v in encoding.environ.iteritems()
107 if _envre.match(k)]
106 envhash = _hashlist(sorted(envitems))
108 envhash = _hashlist(sorted(envitems))
107 return sectionhash[:6] + envhash[:6]
109 return sectionhash[:6] + envhash[:6]
108
110
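For illustration, the confighash is two truncated SHA-1 digests glued together, one over the sensitive config sections and one over the filtered environment, so any change in either input produces a different 12-character string. A standalone approximation using made-up inputs in place of ui.configitems() and encoding.environ::

    import hashlib

    def _hashlist(items):
        return hashlib.sha1(str(items)).hexdigest()

    # hypothetical stand-ins for the real config sections and environment
    sectionitems = [[('alias', 'nlog', 'log --limit 5')], [], []]
    envitems = sorted([('HGPLAIN', '1'), ('PATH', '/usr/bin')])
    confighash = _hashlist(sectionitems)[:6] + _hashlist(envitems)[:6]  # 12 hex chars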
109 def _getmtimepaths(ui):
111 def _getmtimepaths(ui):
110 """get a list of paths that should be checked to detect change
112 """get a list of paths that should be checked to detect change
111
113
112 The list will include:
114 The list will include:
113 - extensions (will not cover all files for complex extensions)
115 - extensions (will not cover all files for complex extensions)
114 - mercurial/__version__.py
116 - mercurial/__version__.py
115 - python binary
117 - python binary
116 """
118 """
117 modules = [m for n, m in extensions.extensions(ui)]
119 modules = [m for n, m in extensions.extensions(ui)]
118 try:
120 try:
119 from . import __version__
121 from . import __version__
120 modules.append(__version__)
122 modules.append(__version__)
121 except ImportError:
123 except ImportError:
122 pass
124 pass
123 files = [sys.executable]
125 files = [sys.executable]
124 for m in modules:
126 for m in modules:
125 try:
127 try:
126 files.append(inspect.getabsfile(m))
128 files.append(inspect.getabsfile(m))
127 except TypeError:
129 except TypeError:
128 pass
130 pass
129 return sorted(set(files))
131 return sorted(set(files))
130
132
131 def _mtimehash(paths):
133 def _mtimehash(paths):
132 """return a quick hash for detecting file changes
134 """return a quick hash for detecting file changes
133
135
134 mtimehash calls stat on the given paths and calculates a hash based on the
136 mtimehash calls stat on the given paths and calculates a hash based on the
135 size and mtime of each file. mtimehash does not read file content because
137 size and mtime of each file. mtimehash does not read file content because
136 reading is expensive, so it is not 100% reliable for detecting content changes:
138 reading is expensive, so it is not 100% reliable for detecting content changes:
137 it can return different hashes for the same file contents, and it can
139 it can return different hashes for the same file contents, and it can
138 also return the same hash for different file contents in some carefully
140 also return the same hash for different file contents in some carefully
139 crafted situations.
141 crafted situations.
140
142
141 for chgserver, the design is that once mtimehash changes, the server is
143 for chgserver, the design is that once mtimehash changes, the server is
142 considered outdated immediately and should no longer provide service.
144 considered outdated immediately and should no longer provide service.
143
145
144 mtimehash is not included in confighash because we only know the paths of
146 mtimehash is not included in confighash because we only know the paths of
145 extensions after importing them (there is imp.find_module but that faces
147 extensions after importing them (there is imp.find_module but that faces
146 race conditions). We need to calculate confighash without importing.
148 race conditions). We need to calculate confighash without importing.
147 """
149 """
148 def trystat(path):
150 def trystat(path):
149 try:
151 try:
150 st = os.stat(path)
152 st = os.stat(path)
151 return (st.st_mtime, st.st_size)
153 return (st.st_mtime, st.st_size)
152 except OSError:
154 except OSError:
153 # could be ENOENT, EPERM etc. not fatal in any case
155 # could be ENOENT, EPERM etc. not fatal in any case
154 pass
156 pass
155 return _hashlist(map(trystat, paths))[:12]
157 return _hashlist(map(trystat, paths))[:12]
156
158
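As a quick usage sketch, each path contributes either an (mtime, size) pair or None (for unreadable paths), and the combined digest is truncated to 12 characters; the import location below is an assumption for illustration::

    import sys
    from mercurial.chgserver import _mtimehash   # assumed import location

    paths = [sys.executable, '/path/to/some/extension.py']   # placeholder paths
    print(_mtimehash(paths))                                  # 12-character hex string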
157 class hashstate(object):
159 class hashstate(object):
158 """a structure storing confighash, mtimehash, paths used for mtimehash"""
160 """a structure storing confighash, mtimehash, paths used for mtimehash"""
159 def __init__(self, confighash, mtimehash, mtimepaths):
161 def __init__(self, confighash, mtimehash, mtimepaths):
160 self.confighash = confighash
162 self.confighash = confighash
161 self.mtimehash = mtimehash
163 self.mtimehash = mtimehash
162 self.mtimepaths = mtimepaths
164 self.mtimepaths = mtimepaths
163
165
164 @staticmethod
166 @staticmethod
165 def fromui(ui, mtimepaths=None):
167 def fromui(ui, mtimepaths=None):
166 if mtimepaths is None:
168 if mtimepaths is None:
167 mtimepaths = _getmtimepaths(ui)
169 mtimepaths = _getmtimepaths(ui)
168 confighash = _confighash(ui)
170 confighash = _confighash(ui)
169 mtimehash = _mtimehash(mtimepaths)
171 mtimehash = _mtimehash(mtimepaths)
170 _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash))
172 _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash))
171 return hashstate(confighash, mtimehash, mtimepaths)
173 return hashstate(confighash, mtimehash, mtimepaths)
172
174
173 # copied from hgext/pager.py:uisetup()
175 # copied from hgext/pager.py:uisetup()
174 def _setuppagercmd(ui, options, cmd):
176 def _setuppagercmd(ui, options, cmd):
175 from . import commands # avoid cycle
177 from . import commands # avoid cycle
176
178
177 if not ui.formatted():
179 if not ui.formatted():
178 return
180 return
179
181
180 p = ui.config("pager", "pager", os.environ.get("PAGER"))
182 p = ui.config("pager", "pager", encoding.environ.get("PAGER"))
181 usepager = False
183 usepager = False
182 always = util.parsebool(options['pager'])
184 always = util.parsebool(options['pager'])
183 auto = options['pager'] == 'auto'
185 auto = options['pager'] == 'auto'
184
186
185 if not p:
187 if not p:
186 pass
188 pass
187 elif always:
189 elif always:
188 usepager = True
190 usepager = True
189 elif not auto:
191 elif not auto:
190 usepager = False
192 usepager = False
191 else:
193 else:
192 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
194 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
193 attend = ui.configlist('pager', 'attend', attended)
195 attend = ui.configlist('pager', 'attend', attended)
194 ignore = ui.configlist('pager', 'ignore')
196 ignore = ui.configlist('pager', 'ignore')
195 cmds, _ = cmdutil.findcmd(cmd, commands.table)
197 cmds, _ = cmdutil.findcmd(cmd, commands.table)
196
198
197 for cmd in cmds:
199 for cmd in cmds:
198 var = 'attend-%s' % cmd
200 var = 'attend-%s' % cmd
199 if ui.config('pager', var):
201 if ui.config('pager', var):
200 usepager = ui.configbool('pager', var)
202 usepager = ui.configbool('pager', var)
201 break
203 break
202 if (cmd in attend or
204 if (cmd in attend or
203 (cmd not in ignore and not attend)):
205 (cmd not in ignore and not attend)):
204 usepager = True
206 usepager = True
205 break
207 break
206
208
207 if usepager:
209 if usepager:
208 ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
210 ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
209 ui.setconfig('ui', 'interactive', False, 'pager')
211 ui.setconfig('ui', 'interactive', False, 'pager')
210 return p
212 return p
211
213
212 def _newchgui(srcui, csystem):
214 def _newchgui(srcui, csystem):
213 class chgui(srcui.__class__):
215 class chgui(srcui.__class__):
214 def __init__(self, src=None):
216 def __init__(self, src=None):
215 super(chgui, self).__init__(src)
217 super(chgui, self).__init__(src)
216 if src:
218 if src:
217 self._csystem = getattr(src, '_csystem', csystem)
219 self._csystem = getattr(src, '_csystem', csystem)
218 else:
220 else:
219 self._csystem = csystem
221 self._csystem = csystem
220
222
221 def system(self, cmd, environ=None, cwd=None, onerr=None,
223 def system(self, cmd, environ=None, cwd=None, onerr=None,
222 errprefix=None):
224 errprefix=None):
223 # fallback to the original system method if the output needs to be
225 # fallback to the original system method if the output needs to be
224 # captured (to self._buffers), or the output stream is not stdout
226 # captured (to self._buffers), or the output stream is not stdout
225 # (e.g. stderr, cStringIO), because the chg client is not aware of
227 # (e.g. stderr, cStringIO), because the chg client is not aware of
226 # these situations and will behave differently (write to stdout).
228 # these situations and will behave differently (write to stdout).
227 if (any(s[1] for s in self._bufferstates)
229 if (any(s[1] for s in self._bufferstates)
228 or not util.safehasattr(self.fout, 'fileno')
230 or not util.safehasattr(self.fout, 'fileno')
229 or self.fout.fileno() != util.stdout.fileno()):
231 or self.fout.fileno() != util.stdout.fileno()):
230 return super(chgui, self).system(cmd, environ, cwd, onerr,
232 return super(chgui, self).system(cmd, environ, cwd, onerr,
231 errprefix)
233 errprefix)
232 # copied from mercurial/util.py:system()
234 # copied from mercurial/util.py:system()
233 self.flush()
235 self.flush()
234 def py2shell(val):
236 def py2shell(val):
235 if val is None or val is False:
237 if val is None or val is False:
236 return '0'
238 return '0'
237 if val is True:
239 if val is True:
238 return '1'
240 return '1'
239 return str(val)
241 return str(val)
240 env = os.environ.copy()
242 env = encoding.environ.copy()
241 if environ:
243 if environ:
242 env.update((k, py2shell(v)) for k, v in environ.iteritems())
244 env.update((k, py2shell(v)) for k, v in environ.iteritems())
243 env['HG'] = util.hgexecutable()
245 env['HG'] = util.hgexecutable()
244 rc = self._csystem(cmd, env, cwd)
246 rc = self._csystem(cmd, env, cwd)
245 if rc and onerr:
247 if rc and onerr:
246 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
248 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
247 util.explainexit(rc)[0])
249 util.explainexit(rc)[0])
248 if errprefix:
250 if errprefix:
249 errmsg = '%s: %s' % (errprefix, errmsg)
251 errmsg = '%s: %s' % (errprefix, errmsg)
250 raise onerr(errmsg)
252 raise onerr(errmsg)
251 return rc
253 return rc
252
254
253 return chgui(srcui)
255 return chgui(srcui)
254
256
255 def _loadnewui(srcui, args):
257 def _loadnewui(srcui, args):
256 from . import dispatch # avoid cycle
258 from . import dispatch # avoid cycle
257
259
258 newui = srcui.__class__.load()
260 newui = srcui.__class__.load()
259 for a in ['fin', 'fout', 'ferr', 'environ']:
261 for a in ['fin', 'fout', 'ferr', 'environ']:
260 setattr(newui, a, getattr(srcui, a))
262 setattr(newui, a, getattr(srcui, a))
261 if util.safehasattr(srcui, '_csystem'):
263 if util.safehasattr(srcui, '_csystem'):
262 newui._csystem = srcui._csystem
264 newui._csystem = srcui._csystem
263
265
264 # command line args
266 # command line args
265 args = args[:]
267 args = args[:]
266 dispatch._parseconfig(newui, dispatch._earlygetopt(['--config'], args))
268 dispatch._parseconfig(newui, dispatch._earlygetopt(['--config'], args))
267
269
268 # stolen from tortoisehg.util.copydynamicconfig()
270 # stolen from tortoisehg.util.copydynamicconfig()
269 for section, name, value in srcui.walkconfig():
271 for section, name, value in srcui.walkconfig():
270 source = srcui.configsource(section, name)
272 source = srcui.configsource(section, name)
271 if ':' in source or source == '--config':
273 if ':' in source or source == '--config':
272 # path:line or command line
274 # path:line or command line
273 continue
275 continue
274 newui.setconfig(section, name, value, source)
276 newui.setconfig(section, name, value, source)
275
277
276 # load wd and repo config, copied from dispatch.py
278 # load wd and repo config, copied from dispatch.py
277 cwds = dispatch._earlygetopt(['--cwd'], args)
279 cwds = dispatch._earlygetopt(['--cwd'], args)
278 cwd = cwds and os.path.realpath(cwds[-1]) or None
280 cwd = cwds and os.path.realpath(cwds[-1]) or None
279 rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args)
281 rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args)
280 path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)
282 path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)
281
283
282 return (newui, newlui)
284 return (newui, newlui)
283
285
284 class channeledsystem(object):
286 class channeledsystem(object):
285 """Propagate ui.system() request in the following format:
287 """Propagate ui.system() request in the following format:
286
288
287 payload length (unsigned int),
289 payload length (unsigned int),
288 cmd, '\0',
290 cmd, '\0',
289 cwd, '\0',
291 cwd, '\0',
290 envkey, '=', val, '\0',
292 envkey, '=', val, '\0',
291 ...
293 ...
292 envkey, '=', val
294 envkey, '=', val
293
295
294 and waits:
296 and waits:
295
297
296 exitcode length (unsigned int),
298 exitcode length (unsigned int),
297 exitcode (int)
299 exitcode (int)
298 """
300 """
299 def __init__(self, in_, out, channel):
301 def __init__(self, in_, out, channel):
300 self.in_ = in_
302 self.in_ = in_
301 self.out = out
303 self.out = out
302 self.channel = channel
304 self.channel = channel
303
305
304 def __call__(self, cmd, environ, cwd):
306 def __call__(self, cmd, environ, cwd):
305 args = [util.quotecommand(cmd), os.path.abspath(cwd or '.')]
307 args = [util.quotecommand(cmd), os.path.abspath(cwd or '.')]
306 args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
308 args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
307 data = '\0'.join(args)
309 data = '\0'.join(args)
308 self.out.write(struct.pack('>cI', self.channel, len(data)))
310 self.out.write(struct.pack('>cI', self.channel, len(data)))
309 self.out.write(data)
311 self.out.write(data)
310 self.out.flush()
312 self.out.flush()
311
313
312 length = self.in_.read(4)
314 length = self.in_.read(4)
313 length, = struct.unpack('>I', length)
315 length, = struct.unpack('>I', length)
314 if length != 4:
316 if length != 4:
315 raise error.Abort(_('invalid response'))
317 raise error.Abort(_('invalid response'))
316 rc, = struct.unpack('>i', self.in_.read(4))
318 rc, = struct.unpack('>i', self.in_.read(4))
317 return rc
319 return rc
318
320
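For reference, the client side of this 'S' channel exchange is the mirror image of __call__ above: read the frame, split the payload on NUL bytes, run the command, and send back a 4-byte length followed by the exit code. A hypothetical pure-Python reader (the real chg client implements this in C; 'stream' is assumed to be a file-like object for the socket)::

    import struct
    import subprocess

    def handlesystemrequest(stream):
        channel, length = struct.unpack('>cI', stream.read(5))
        assert channel == 'S'
        fields = stream.read(length).split('\0')
        cmd, cwd = fields[0], fields[1]
        env = dict(item.split('=', 1) for item in fields[2:])
        rc = subprocess.call(cmd, shell=True, cwd=cwd, env=env)
        stream.write(struct.pack('>I', 4) + struct.pack('>i', rc))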
319 _iochannels = [
321 _iochannels = [
320 # server.ch, ui.fp, mode
322 # server.ch, ui.fp, mode
321 ('cin', 'fin', 'rb'),
323 ('cin', 'fin', 'rb'),
322 ('cout', 'fout', 'wb'),
324 ('cout', 'fout', 'wb'),
323 ('cerr', 'ferr', 'wb'),
325 ('cerr', 'ferr', 'wb'),
324 ]
326 ]
325
327
326 class chgcmdserver(commandserver.server):
328 class chgcmdserver(commandserver.server):
327 def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress):
329 def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress):
328 super(chgcmdserver, self).__init__(
330 super(chgcmdserver, self).__init__(
329 _newchgui(ui, channeledsystem(fin, fout, 'S')), repo, fin, fout)
331 _newchgui(ui, channeledsystem(fin, fout, 'S')), repo, fin, fout)
330 self.clientsock = sock
332 self.clientsock = sock
331 self._oldios = [] # original (self.ch, ui.fp, fd) before "attachio"
333 self._oldios = [] # original (self.ch, ui.fp, fd) before "attachio"
332 self.hashstate = hashstate
334 self.hashstate = hashstate
333 self.baseaddress = baseaddress
335 self.baseaddress = baseaddress
334 if hashstate is not None:
336 if hashstate is not None:
335 self.capabilities = self.capabilities.copy()
337 self.capabilities = self.capabilities.copy()
336 self.capabilities['validate'] = chgcmdserver.validate
338 self.capabilities['validate'] = chgcmdserver.validate
337
339
338 def cleanup(self):
340 def cleanup(self):
339 super(chgcmdserver, self).cleanup()
341 super(chgcmdserver, self).cleanup()
340 # dispatch._runcatch() does not flush outputs if exception is not
342 # dispatch._runcatch() does not flush outputs if exception is not
341 # handled by dispatch._dispatch()
343 # handled by dispatch._dispatch()
342 self.ui.flush()
344 self.ui.flush()
343 self._restoreio()
345 self._restoreio()
344
346
345 def attachio(self):
347 def attachio(self):
346 """Attach to client's stdio passed via unix domain socket; all
348 """Attach to client's stdio passed via unix domain socket; all
347 channels except cresult will no longer be used
349 channels except cresult will no longer be used
348 """
350 """
349 # tell client to sendmsg() with 1-byte payload, which makes it
351 # tell client to sendmsg() with 1-byte payload, which makes it
350 # distinctive from "attachio\n" command consumed by client.read()
352 # distinctive from "attachio\n" command consumed by client.read()
351 self.clientsock.sendall(struct.pack('>cI', 'I', 1))
353 self.clientsock.sendall(struct.pack('>cI', 'I', 1))
352 clientfds = osutil.recvfds(self.clientsock.fileno())
354 clientfds = osutil.recvfds(self.clientsock.fileno())
353 _log('received fds: %r\n' % clientfds)
355 _log('received fds: %r\n' % clientfds)
354
356
355 ui = self.ui
357 ui = self.ui
356 ui.flush()
358 ui.flush()
357 first = self._saveio()
359 first = self._saveio()
358 for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
360 for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
359 assert fd > 0
361 assert fd > 0
360 fp = getattr(ui, fn)
362 fp = getattr(ui, fn)
361 os.dup2(fd, fp.fileno())
363 os.dup2(fd, fp.fileno())
362 os.close(fd)
364 os.close(fd)
363 if not first:
365 if not first:
364 continue
366 continue
365 # reset the buffering mode when the client is first attached. as we want
367 # reset the buffering mode when the client is first attached. as we want
366 # to see output immediately on the pager, the mode stays unchanged when
368 # to see output immediately on the pager, the mode stays unchanged when
367 # the client re-attaches. ferr is unchanged because it should be
369 # the client re-attaches. ferr is unchanged because it should be
368 # unbuffered no matter whether it is a tty or not.
370 # unbuffered no matter whether it is a tty or not.
369 if fn == 'ferr':
371 if fn == 'ferr':
370 newfp = fp
372 newfp = fp
371 else:
373 else:
372 # make it line buffered explicitly because the default is
374 # make it line buffered explicitly because the default is
373 # decided on first write(), where fout could be a pager.
375 # decided on first write(), where fout could be a pager.
374 if fp.isatty():
376 if fp.isatty():
375 bufsize = 1 # line buffered
377 bufsize = 1 # line buffered
376 else:
378 else:
377 bufsize = -1 # system default
379 bufsize = -1 # system default
378 newfp = os.fdopen(fp.fileno(), mode, bufsize)
380 newfp = os.fdopen(fp.fileno(), mode, bufsize)
379 setattr(ui, fn, newfp)
381 setattr(ui, fn, newfp)
380 setattr(self, cn, newfp)
382 setattr(self, cn, newfp)
381
383
382 self.cresult.write(struct.pack('>i', len(clientfds)))
384 self.cresult.write(struct.pack('>i', len(clientfds)))
383
385
384 def _saveio(self):
386 def _saveio(self):
385 if self._oldios:
387 if self._oldios:
386 return False
388 return False
387 ui = self.ui
389 ui = self.ui
388 for cn, fn, _mode in _iochannels:
390 for cn, fn, _mode in _iochannels:
389 ch = getattr(self, cn)
391 ch = getattr(self, cn)
390 fp = getattr(ui, fn)
392 fp = getattr(ui, fn)
391 fd = os.dup(fp.fileno())
393 fd = os.dup(fp.fileno())
392 self._oldios.append((ch, fp, fd))
394 self._oldios.append((ch, fp, fd))
393 return True
395 return True
394
396
395 def _restoreio(self):
397 def _restoreio(self):
396 ui = self.ui
398 ui = self.ui
397 for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
399 for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
398 newfp = getattr(ui, fn)
400 newfp = getattr(ui, fn)
399 # close newfp while it's associated with client; otherwise it
401 # close newfp while it's associated with client; otherwise it
400 # would be closed when newfp is deleted
402 # would be closed when newfp is deleted
401 if newfp is not fp:
403 if newfp is not fp:
402 newfp.close()
404 newfp.close()
403 # restore original fd: fp is open again
405 # restore original fd: fp is open again
404 os.dup2(fd, fp.fileno())
406 os.dup2(fd, fp.fileno())
405 os.close(fd)
407 os.close(fd)
406 setattr(self, cn, ch)
408 setattr(self, cn, ch)
407 setattr(ui, fn, fp)
409 setattr(ui, fn, fp)
408 del self._oldios[:]
410 del self._oldios[:]
409
411
410 def validate(self):
412 def validate(self):
411 """Reload the config and check if the server is up to date
413 """Reload the config and check if the server is up to date
412
414
413 Read a list of '\0' separated arguments.
415 Read a list of '\0' separated arguments.
414 Write a non-empty list of '\0' separated instruction strings or '\0'
416 Write a non-empty list of '\0' separated instruction strings or '\0'
415 if the list is empty.
417 if the list is empty.
416 An instruction string can be one of the following:
418 An instruction string can be one of the following:
417 - "unlink $path", the client should unlink the path to stop the
419 - "unlink $path", the client should unlink the path to stop the
418 outdated server.
420 outdated server.
419 - "redirect $path", the client should attempt to connect to $path
421 - "redirect $path", the client should attempt to connect to $path
420 first. If it does not work, start a new server. It implies
422 first. If it does not work, start a new server. It implies
421 "reconnect".
423 "reconnect".
422 - "exit $n", the client should exit directly with code n.
424 - "exit $n", the client should exit directly with code n.
423 This may happen if we cannot parse the config.
425 This may happen if we cannot parse the config.
424 - "reconnect", the client should close the connection and
426 - "reconnect", the client should close the connection and
425 reconnect.
427 reconnect.
426 If neither "reconnect" nor "redirect" is included in the instruction
428 If neither "reconnect" nor "redirect" is included in the instruction
427 list, the client can continue with this server after completing all
429 list, the client can continue with this server after completing all
428 the instructions.
430 the instructions.
429 """
431 """
430 from . import dispatch # avoid cycle
432 from . import dispatch # avoid cycle
431
433
432 args = self._readlist()
434 args = self._readlist()
433 try:
435 try:
434 self.ui, lui = _loadnewui(self.ui, args)
436 self.ui, lui = _loadnewui(self.ui, args)
435 except error.ParseError as inst:
437 except error.ParseError as inst:
436 dispatch._formatparse(self.ui.warn, inst)
438 dispatch._formatparse(self.ui.warn, inst)
437 self.ui.flush()
439 self.ui.flush()
438 self.cresult.write('exit 255')
440 self.cresult.write('exit 255')
439 return
441 return
440 newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
442 newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
441 insts = []
443 insts = []
442 if newhash.mtimehash != self.hashstate.mtimehash:
444 if newhash.mtimehash != self.hashstate.mtimehash:
443 addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
445 addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
444 insts.append('unlink %s' % addr)
446 insts.append('unlink %s' % addr)
445 # mtimehash is empty if one or more extensions fail to load.
447 # mtimehash is empty if one or more extensions fail to load.
446 # to be compatible with hg, still serve the client this time.
448 # to be compatible with hg, still serve the client this time.
447 if self.hashstate.mtimehash:
449 if self.hashstate.mtimehash:
448 insts.append('reconnect')
450 insts.append('reconnect')
449 if newhash.confighash != self.hashstate.confighash:
451 if newhash.confighash != self.hashstate.confighash:
450 addr = _hashaddress(self.baseaddress, newhash.confighash)
452 addr = _hashaddress(self.baseaddress, newhash.confighash)
451 insts.append('redirect %s' % addr)
453 insts.append('redirect %s' % addr)
452 _log('validate: %s\n' % insts)
454 _log('validate: %s\n' % insts)
453 self.cresult.write('\0'.join(insts) or '\0')
455 self.cresult.write('\0'.join(insts) or '\0')
454
456
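The instruction list written above is easiest to read from the consumer's side: it is a NUL-separated list, and a lone '\0' stands for the empty list. A small, self-contained parser for the instruction strings described in the docstring (the example address is made up)::

    def parseinstructions(data):
        # split the NUL-separated instruction list into (action, argument) pairs
        insts = [s for s in data.split('\0') if s]
        parsed = []
        for inst in insts:
            if ' ' in inst:
                action, arg = inst.split(' ', 1)   # 'unlink PATH', 'redirect PATH', 'exit N'
            else:
                action, arg = inst, None           # 'reconnect'
            parsed.append((action, arg))
        return parsed

    # example: the server asks the client to drop an outdated socket and reconnect
    print(parseinstructions('unlink /tmp/chg1234/server-0ldh4sh\0reconnect'))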
455 def chdir(self):
457 def chdir(self):
456 """Change current directory
458 """Change current directory
457
459
458 Note that the behavior of the --cwd option is a bit different from this.
460 Note that the behavior of the --cwd option is a bit different from this.
459 It does not affect the --config parameter.
461 It does not affect the --config parameter.
460 """
462 """
461 path = self._readstr()
463 path = self._readstr()
462 if not path:
464 if not path:
463 return
465 return
464 _log('chdir to %r\n' % path)
466 _log('chdir to %r\n' % path)
465 os.chdir(path)
467 os.chdir(path)
466
468
467 def setumask(self):
469 def setumask(self):
468 """Change umask"""
470 """Change umask"""
469 mask = struct.unpack('>I', self._read(4))[0]
471 mask = struct.unpack('>I', self._read(4))[0]
470 _log('setumask %r\n' % mask)
472 _log('setumask %r\n' % mask)
471 os.umask(mask)
473 os.umask(mask)
472
474
473 def getpager(self):
475 def getpager(self):
474 """Read cmdargs and write pager command to r-channel if enabled
476 """Read cmdargs and write pager command to r-channel if enabled
475
477
476 If pager isn't enabled, this writes '\0' because channeledoutput
478 If pager isn't enabled, this writes '\0' because channeledoutput
477 does not allow writing empty data.
479 does not allow writing empty data.
478 """
480 """
479 from . import dispatch # avoid cycle
481 from . import dispatch # avoid cycle
480
482
481 args = self._readlist()
483 args = self._readlist()
482 try:
484 try:
483 cmd, _func, args, options, _cmdoptions = dispatch._parse(self.ui,
485 cmd, _func, args, options, _cmdoptions = dispatch._parse(self.ui,
484 args)
486 args)
485 except (error.Abort, error.AmbiguousCommand, error.CommandError,
487 except (error.Abort, error.AmbiguousCommand, error.CommandError,
486 error.UnknownCommand):
488 error.UnknownCommand):
487 cmd = None
489 cmd = None
488 options = {}
490 options = {}
489 if not cmd or 'pager' not in options:
491 if not cmd or 'pager' not in options:
490 self.cresult.write('\0')
492 self.cresult.write('\0')
491 return
493 return
492
494
493 pagercmd = _setuppagercmd(self.ui, options, cmd)
495 pagercmd = _setuppagercmd(self.ui, options, cmd)
494 if pagercmd:
496 if pagercmd:
495 # Python's SIGPIPE is SIG_IGN by default. change to SIG_DFL so
497 # Python's SIGPIPE is SIG_IGN by default. change to SIG_DFL so
496 # we can exit if the pipe to the pager is closed
498 # we can exit if the pipe to the pager is closed
497 if util.safehasattr(signal, 'SIGPIPE') and \
499 if util.safehasattr(signal, 'SIGPIPE') and \
498 signal.getsignal(signal.SIGPIPE) == signal.SIG_IGN:
500 signal.getsignal(signal.SIGPIPE) == signal.SIG_IGN:
499 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
501 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
500 self.cresult.write(pagercmd)
502 self.cresult.write(pagercmd)
501 else:
503 else:
502 self.cresult.write('\0')
504 self.cresult.write('\0')
503
505
504 def setenv(self):
506 def setenv(self):
505 """Clear and update os.environ
507 """Clear and update os.environ
506
508
507 Note that not all variables take effect on the running process.
509 Note that not all variables take effect on the running process.
508 """
510 """
509 l = self._readlist()
511 l = self._readlist()
510 try:
512 try:
511 newenv = dict(s.split('=', 1) for s in l)
513 newenv = dict(s.split('=', 1) for s in l)
512 except ValueError:
514 except ValueError:
513 raise ValueError('unexpected value in setenv request')
515 raise ValueError('unexpected value in setenv request')
514 _log('setenv: %r\n' % sorted(newenv.keys()))
516 _log('setenv: %r\n' % sorted(newenv.keys()))
515 os.environ.clear()
517 encoding.environ.clear()
516 os.environ.update(newenv)
518 encoding.environ.update(newenv)
517
519
518 capabilities = commandserver.server.capabilities.copy()
520 capabilities = commandserver.server.capabilities.copy()
519 capabilities.update({'attachio': attachio,
521 capabilities.update({'attachio': attachio,
520 'chdir': chdir,
522 'chdir': chdir,
521 'getpager': getpager,
523 'getpager': getpager,
522 'setenv': setenv,
524 'setenv': setenv,
523 'setumask': setumask})
525 'setumask': setumask})
524
526
525 def _tempaddress(address):
527 def _tempaddress(address):
526 return '%s.%d.tmp' % (address, os.getpid())
528 return '%s.%d.tmp' % (address, os.getpid())
527
529
528 def _hashaddress(address, hashstr):
530 def _hashaddress(address, hashstr):
529 # if the basename of address contains '.', use only the left part. this
531 # if the basename of address contains '.', use only the left part. this
530 # makes it possible for the client to pass 'server.tmp$PID' and follow by
532 # makes it possible for the client to pass 'server.tmp$PID' and follow by
531 # an atomic rename to avoid locking when spawning new servers.
533 # an atomic rename to avoid locking when spawning new servers.
532 dirname, basename = os.path.split(address)
534 dirname, basename = os.path.split(address)
533 basename = basename.split('.', 1)[0]
535 basename = basename.split('.', 1)[0]
534 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
536 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
535
537
536 class chgunixservicehandler(object):
538 class chgunixservicehandler(object):
537 """Set of operations for chg services"""
539 """Set of operations for chg services"""
538
540
539 pollinterval = 1 # [sec]
541 pollinterval = 1 # [sec]
540
542
541 def __init__(self, ui):
543 def __init__(self, ui):
542 self.ui = ui
544 self.ui = ui
543 self._idletimeout = ui.configint('chgserver', 'idletimeout', 3600)
545 self._idletimeout = ui.configint('chgserver', 'idletimeout', 3600)
544 self._lastactive = time.time()
546 self._lastactive = time.time()
545
547
546 def bindsocket(self, sock, address):
548 def bindsocket(self, sock, address):
547 self._inithashstate(address)
549 self._inithashstate(address)
548 self._checkextensions()
550 self._checkextensions()
549 self._bind(sock)
551 self._bind(sock)
550 self._createsymlink()
552 self._createsymlink()
551
553
552 def _inithashstate(self, address):
554 def _inithashstate(self, address):
553 self._baseaddress = address
555 self._baseaddress = address
554 if self.ui.configbool('chgserver', 'skiphash', False):
556 if self.ui.configbool('chgserver', 'skiphash', False):
555 self._hashstate = None
557 self._hashstate = None
556 self._realaddress = address
558 self._realaddress = address
557 return
559 return
558 self._hashstate = hashstate.fromui(self.ui)
560 self._hashstate = hashstate.fromui(self.ui)
559 self._realaddress = _hashaddress(address, self._hashstate.confighash)
561 self._realaddress = _hashaddress(address, self._hashstate.confighash)
560
562
561 def _checkextensions(self):
563 def _checkextensions(self):
562 if not self._hashstate:
564 if not self._hashstate:
563 return
565 return
564 if extensions.notloaded():
566 if extensions.notloaded():
565 # one or more extensions failed to load. mtimehash becomes
567 # one or more extensions failed to load. mtimehash becomes
566 # meaningless because we do not know the paths of those extensions.
568 # meaningless because we do not know the paths of those extensions.
567 # set mtimehash to an illegal hash value to invalidate the server.
569 # set mtimehash to an illegal hash value to invalidate the server.
568 self._hashstate.mtimehash = ''
570 self._hashstate.mtimehash = ''
569
571
570 def _bind(self, sock):
572 def _bind(self, sock):
571 # use a unique temp address so we can stat the file and do ownership
573 # use a unique temp address so we can stat the file and do ownership
572 # check later
574 # check later
573 tempaddress = _tempaddress(self._realaddress)
575 tempaddress = _tempaddress(self._realaddress)
574 util.bindunixsocket(sock, tempaddress)
576 util.bindunixsocket(sock, tempaddress)
575 self._socketstat = os.stat(tempaddress)
577 self._socketstat = os.stat(tempaddress)
576 # rename will atomically replace the old socket file if it exists. the
578 # rename will atomically replace the old socket file if it exists. the
577 # old server will detect the ownership change and exit.
579 # old server will detect the ownership change and exit.
578 util.rename(tempaddress, self._realaddress)
580 util.rename(tempaddress, self._realaddress)
579
581
580 def _createsymlink(self):
582 def _createsymlink(self):
581 if self._baseaddress == self._realaddress:
583 if self._baseaddress == self._realaddress:
582 return
584 return
583 tempaddress = _tempaddress(self._baseaddress)
585 tempaddress = _tempaddress(self._baseaddress)
584 os.symlink(os.path.basename(self._realaddress), tempaddress)
586 os.symlink(os.path.basename(self._realaddress), tempaddress)
585 util.rename(tempaddress, self._baseaddress)
587 util.rename(tempaddress, self._baseaddress)
586
588
587 def _issocketowner(self):
589 def _issocketowner(self):
588 try:
590 try:
589 stat = os.stat(self._realaddress)
591 stat = os.stat(self._realaddress)
590 return (stat.st_ino == self._socketstat.st_ino and
592 return (stat.st_ino == self._socketstat.st_ino and
591 stat.st_mtime == self._socketstat.st_mtime)
593 stat.st_mtime == self._socketstat.st_mtime)
592 except OSError:
594 except OSError:
593 return False
595 return False
594
596
595 def unlinksocket(self, address):
597 def unlinksocket(self, address):
596 if not self._issocketowner():
598 if not self._issocketowner():
597 return
599 return
598 # there is a possible race condition here in which we may
600 # there is a possible race condition here in which we may
599 # remove another server's socket file. but that's okay
601 # remove another server's socket file. but that's okay
600 # since that server will detect and exit automatically and
602 # since that server will detect and exit automatically and
601 # the client will start a new server on demand.
603 # the client will start a new server on demand.
602 try:
604 try:
603 os.unlink(self._realaddress)
605 os.unlink(self._realaddress)
604 except OSError as exc:
606 except OSError as exc:
605 if exc.errno != errno.ENOENT:
607 if exc.errno != errno.ENOENT:
606 raise
608 raise
607
609
608 def printbanner(self, address):
610 def printbanner(self, address):
609 # no "listening at" message should be printed to simulate hg behavior
611 # no "listening at" message should be printed to simulate hg behavior
610 pass
612 pass
611
613
612 def shouldexit(self):
614 def shouldexit(self):
613 if not self._issocketowner():
615 if not self._issocketowner():
614 self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
616 self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
615 return True
617 return True
616 if time.time() - self._lastactive > self._idletimeout:
618 if time.time() - self._lastactive > self._idletimeout:
617 self.ui.debug('being idle too long. exiting.\n')
619 self.ui.debug('being idle too long. exiting.\n')
618 return True
620 return True
619 return False
621 return False
620
622
621 def newconnection(self):
623 def newconnection(self):
622 self._lastactive = time.time()
624 self._lastactive = time.time()
623
625
624 def createcmdserver(self, repo, conn, fin, fout):
626 def createcmdserver(self, repo, conn, fin, fout):
625 return chgcmdserver(self.ui, repo, fin, fout, conn,
627 return chgcmdserver(self.ui, repo, fin, fout, conn,
626 self._hashstate, self._baseaddress)
628 self._hashstate, self._baseaddress)
627
629
628 def chgunixservice(ui, repo, opts):
630 def chgunixservice(ui, repo, opts):
629 # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
631 # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
630 # start another chg. drop it to avoid possible side effects.
632 # start another chg. drop it to avoid possible side effects.
631 if 'CHGINTERNALMARK' in os.environ:
633 if 'CHGINTERNALMARK' in encoding.environ:
632 del os.environ['CHGINTERNALMARK']
634 del encoding.environ['CHGINTERNALMARK']
633
635
634 if repo:
636 if repo:
635 # one chgserver can serve multiple repos. drop repo information
637 # one chgserver can serve multiple repos. drop repo information
636 ui.setconfig('bundle', 'mainreporoot', '', 'repo')
638 ui.setconfig('bundle', 'mainreporoot', '', 'repo')
637 h = chgunixservicehandler(ui)
639 h = chgunixservicehandler(ui)
638 return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
640 return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
@@ -1,135 +1,135 b''
1 # sshserver.py - ssh protocol server support for mercurial
1 # sshserver.py - ssh protocol server support for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import os
12 import sys
11 import sys
13
12
14 from .i18n import _
13 from .i18n import _
15 from . import (
14 from . import (
15 encoding,
16 error,
16 error,
17 hook,
17 hook,
18 util,
18 util,
19 wireproto,
19 wireproto,
20 )
20 )
21
21
22 class sshserver(wireproto.abstractserverproto):
22 class sshserver(wireproto.abstractserverproto):
23 def __init__(self, ui, repo):
23 def __init__(self, ui, repo):
24 self.ui = ui
24 self.ui = ui
25 self.repo = repo
25 self.repo = repo
26 self.lock = None
26 self.lock = None
27 self.fin = ui.fin
27 self.fin = ui.fin
28 self.fout = ui.fout
28 self.fout = ui.fout
29 self.name = 'ssh'
29 self.name = 'ssh'
30
30
31 hook.redirect(True)
31 hook.redirect(True)
32 ui.fout = repo.ui.fout = ui.ferr
32 ui.fout = repo.ui.fout = ui.ferr
33
33
34 # Prevent insertion/deletion of CRs
34 # Prevent insertion/deletion of CRs
35 util.setbinary(self.fin)
35 util.setbinary(self.fin)
36 util.setbinary(self.fout)
36 util.setbinary(self.fout)
37
37
38 def getargs(self, args):
38 def getargs(self, args):
39 data = {}
39 data = {}
40 keys = args.split()
40 keys = args.split()
41 for n in xrange(len(keys)):
41 for n in xrange(len(keys)):
42 argline = self.fin.readline()[:-1]
42 argline = self.fin.readline()[:-1]
43 arg, l = argline.split()
43 arg, l = argline.split()
44 if arg not in keys:
44 if arg not in keys:
45 raise error.Abort(_("unexpected parameter %r") % arg)
45 raise error.Abort(_("unexpected parameter %r") % arg)
46 if arg == '*':
46 if arg == '*':
47 star = {}
47 star = {}
48 for k in xrange(int(l)):
48 for k in xrange(int(l)):
49 argline = self.fin.readline()[:-1]
49 argline = self.fin.readline()[:-1]
50 arg, l = argline.split()
50 arg, l = argline.split()
51 val = self.fin.read(int(l))
51 val = self.fin.read(int(l))
52 star[arg] = val
52 star[arg] = val
53 data['*'] = star
53 data['*'] = star
54 else:
54 else:
55 val = self.fin.read(int(l))
55 val = self.fin.read(int(l))
56 data[arg] = val
56 data[arg] = val
57 return [data[k] for k in keys]
57 return [data[k] for k in keys]
58
58
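The argument wire format consumed by getargs() is easiest to see from the sending side: each argument is announced on its own line as 'name length', followed by exactly that many bytes of value, and '*' introduces a counted group of such entries. A hypothetical encoder matching the reader above::

    def encodeargs(args, star=None):
        # serialize arguments the way sshserver.getargs() reads them back
        chunks = []
        for name, value in args:
            chunks.append('%s %d\n%s' % (name, len(value), value))
        if star is not None:
            chunks.append('* %d\n' % len(star))
            for name, value in sorted(star.items()):
                chunks.append('%s %d\n%s' % (name, len(value), value))
        return ''.join(chunks)

    # example: what a client might send for getargs('key *')
    payload = encodeargs([('key', 'tip')], star={'opts': 'value'})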
59 def getarg(self, name):
59 def getarg(self, name):
60 return self.getargs(name)[0]
60 return self.getargs(name)[0]
61
61
62 def getfile(self, fpout):
62 def getfile(self, fpout):
63 self.sendresponse('')
63 self.sendresponse('')
64 count = int(self.fin.readline())
64 count = int(self.fin.readline())
65 while count:
65 while count:
66 fpout.write(self.fin.read(count))
66 fpout.write(self.fin.read(count))
67 count = int(self.fin.readline())
67 count = int(self.fin.readline())
68
68
69 def redirect(self):
69 def redirect(self):
70 pass
70 pass
71
71
72 def sendresponse(self, v):
72 def sendresponse(self, v):
73 self.fout.write("%d\n" % len(v))
73 self.fout.write("%d\n" % len(v))
74 self.fout.write(v)
74 self.fout.write(v)
75 self.fout.flush()
75 self.fout.flush()
76
76
77 def sendstream(self, source):
77 def sendstream(self, source):
78 write = self.fout.write
78 write = self.fout.write
79
79
80 if source.reader:
80 if source.reader:
81 gen = iter(lambda: source.reader.read(4096), '')
81 gen = iter(lambda: source.reader.read(4096), '')
82 else:
82 else:
83 gen = source.gen
83 gen = source.gen
84
84
85 for chunk in gen:
85 for chunk in gen:
86 write(chunk)
86 write(chunk)
87 self.fout.flush()
87 self.fout.flush()
88
88
89 def sendpushresponse(self, rsp):
89 def sendpushresponse(self, rsp):
90 self.sendresponse('')
90 self.sendresponse('')
91 self.sendresponse(str(rsp.res))
91 self.sendresponse(str(rsp.res))
92
92
93 def sendpusherror(self, rsp):
93 def sendpusherror(self, rsp):
94 self.sendresponse(rsp.res)
94 self.sendresponse(rsp.res)
95
95
96 def sendooberror(self, rsp):
96 def sendooberror(self, rsp):
97 self.ui.ferr.write('%s\n-\n' % rsp.message)
97 self.ui.ferr.write('%s\n-\n' % rsp.message)
98 self.ui.ferr.flush()
98 self.ui.ferr.flush()
99 self.fout.write('\n')
99 self.fout.write('\n')
100 self.fout.flush()
100 self.fout.flush()
101
101
102 def serve_forever(self):
102 def serve_forever(self):
103 try:
103 try:
104 while self.serve_one():
104 while self.serve_one():
105 pass
105 pass
106 finally:
106 finally:
107 if self.lock is not None:
107 if self.lock is not None:
108 self.lock.release()
108 self.lock.release()
109 sys.exit(0)
109 sys.exit(0)
110
110
111 handlers = {
111 handlers = {
112 str: sendresponse,
112 str: sendresponse,
113 wireproto.streamres: sendstream,
113 wireproto.streamres: sendstream,
114 wireproto.pushres: sendpushresponse,
114 wireproto.pushres: sendpushresponse,
115 wireproto.pusherr: sendpusherror,
115 wireproto.pusherr: sendpusherror,
116 wireproto.ooberror: sendooberror,
116 wireproto.ooberror: sendooberror,
117 }
117 }
118
118
119 def serve_one(self):
119 def serve_one(self):
120 cmd = self.fin.readline()[:-1]
120 cmd = self.fin.readline()[:-1]
121 if cmd and cmd in wireproto.commands:
121 if cmd and cmd in wireproto.commands:
122 rsp = wireproto.dispatch(self.repo, self, cmd)
122 rsp = wireproto.dispatch(self.repo, self, cmd)
123 self.handlers[rsp.__class__](self, rsp)
123 self.handlers[rsp.__class__](self, rsp)
124 elif cmd:
124 elif cmd:
125 impl = getattr(self, 'do_' + cmd, None)
125 impl = getattr(self, 'do_' + cmd, None)
126 if impl:
126 if impl:
127 r = impl()
127 r = impl()
128 if r is not None:
128 if r is not None:
129 self.sendresponse(r)
129 self.sendresponse(r)
130 else: self.sendresponse("")
130 else: self.sendresponse("")
131 return cmd != ''
131 return cmd != ''
132
132
133 def _client(self):
133 def _client(self):
134 client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
134 client = encoding.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
135 return 'remote:ssh:' + client
135 return 'remote:ssh:' + client
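As elsewhere in this series, the only functional change in this file is that SSH_CLIENT is now read through encoding.environ. For completeness, the response framing used by sendresponse() is a simple length prefix; a hypothetical reader for it on the client side (the stream is assumed to be the pipe from the server)::

    def readresponse(stream):
        # read one 'length\n' + payload frame as written by sendresponse()
        length = int(stream.readline())
        return stream.read(length)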
@@ -1,1959 +1,1960 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 cmdutil,
25 cmdutil,
26 config,
26 config,
27 encoding,
27 error,
28 error,
28 exchange,
29 exchange,
29 filemerge,
30 filemerge,
30 match as matchmod,
31 match as matchmod,
31 node,
32 node,
32 pathutil,
33 pathutil,
33 phases,
34 phases,
34 pycompat,
35 pycompat,
35 scmutil,
36 scmutil,
36 util,
37 util,
37 )
38 )
38
39
39 hg = None
40 hg = None
40 propertycache = util.propertycache
41 propertycache = util.propertycache
41
42
42 nullstate = ('', '', 'empty')
43 nullstate = ('', '', 'empty')
43
44
44 def _expandedabspath(path):
45 def _expandedabspath(path):
45 '''
46 '''
46 get a path or URL; if it is a path, expand it and return an absolute path
47 get a path or URL; if it is a path, expand it and return an absolute path
47 '''
48 '''
48 expandedpath = util.urllocalpath(util.expandpath(path))
49 expandedpath = util.urllocalpath(util.expandpath(path))
49 u = util.url(expandedpath)
50 u = util.url(expandedpath)
50 if not u.scheme:
51 if not u.scheme:
51 path = util.normpath(os.path.abspath(u.path))
52 path = util.normpath(os.path.abspath(u.path))
52 return path
53 return path
53
54
54 def _getstorehashcachename(remotepath):
55 def _getstorehashcachename(remotepath):
55 '''get a unique filename for the store hash cache of a remote repository'''
56 '''get a unique filename for the store hash cache of a remote repository'''
56 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
57 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
57
58
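The cache filename is simply the first 12 hex characters of the SHA-1 of the expanded absolute path, so a given remote always maps to the same name. A standalone approximation with a made-up path (the real helper first normalizes the path via _expandedabspath)::

    import hashlib

    remotepath = '/home/user/repos/nested-lib'            # hypothetical subrepo source
    print(hashlib.sha1(remotepath).hexdigest()[0:12])      # 12 hex characters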
58 class SubrepoAbort(error.Abort):
59 class SubrepoAbort(error.Abort):
59 """Exception class used to avoid handling a subrepo error more than once"""
60 """Exception class used to avoid handling a subrepo error more than once"""
60 def __init__(self, *args, **kw):
61 def __init__(self, *args, **kw):
61 self.subrepo = kw.pop('subrepo', None)
62 self.subrepo = kw.pop('subrepo', None)
62 self.cause = kw.pop('cause', None)
63 self.cause = kw.pop('cause', None)
63 error.Abort.__init__(self, *args, **kw)
64 error.Abort.__init__(self, *args, **kw)
64
65
65 def annotatesubrepoerror(func):
66 def annotatesubrepoerror(func):
66 def decoratedmethod(self, *args, **kargs):
67 def decoratedmethod(self, *args, **kargs):
67 try:
68 try:
68 res = func(self, *args, **kargs)
69 res = func(self, *args, **kargs)
69 except SubrepoAbort as ex:
70 except SubrepoAbort as ex:
70 # This exception has already been handled
71 # This exception has already been handled
71 raise ex
72 raise ex
72 except error.Abort as ex:
73 except error.Abort as ex:
73 subrepo = subrelpath(self)
74 subrepo = subrelpath(self)
74 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
75 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
75 # avoid handling this exception by raising a SubrepoAbort exception
76 # avoid handling this exception by raising a SubrepoAbort exception
76 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
77 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
77 cause=sys.exc_info())
78 cause=sys.exc_info())
78 return res
79 return res
79 return decoratedmethod
80 return decoratedmethod
80
81
81 def state(ctx, ui):
82 def state(ctx, ui):
82 """return a state dict, mapping subrepo paths configured in .hgsub
83 """return a state dict, mapping subrepo paths configured in .hgsub
83 to tuple: (source from .hgsub, revision from .hgsubstate, kind
84 to tuple: (source from .hgsub, revision from .hgsubstate, kind
84 (key in types dict))
85 (key in types dict))
85 """
86 """
86 p = config.config()
87 p = config.config()
87 repo = ctx.repo()
88 repo = ctx.repo()
88 def read(f, sections=None, remap=None):
89 def read(f, sections=None, remap=None):
89 if f in ctx:
90 if f in ctx:
90 try:
91 try:
91 data = ctx[f].data()
92 data = ctx[f].data()
92 except IOError as err:
93 except IOError as err:
93 if err.errno != errno.ENOENT:
94 if err.errno != errno.ENOENT:
94 raise
95 raise
95 # handle missing subrepo spec files as removed
96 # handle missing subrepo spec files as removed
96 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
97 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
97 repo.pathto(f))
98 repo.pathto(f))
98 return
99 return
99 p.parse(f, data, sections, remap, read)
100 p.parse(f, data, sections, remap, read)
100 else:
101 else:
101 raise error.Abort(_("subrepo spec file \'%s\' not found") %
102 raise error.Abort(_("subrepo spec file \'%s\' not found") %
102 repo.pathto(f))
103 repo.pathto(f))
103 if '.hgsub' in ctx:
104 if '.hgsub' in ctx:
104 read('.hgsub')
105 read('.hgsub')
105
106
106 for path, src in ui.configitems('subpaths'):
107 for path, src in ui.configitems('subpaths'):
107 p.set('subpaths', path, src, ui.configsource('subpaths', path))
108 p.set('subpaths', path, src, ui.configsource('subpaths', path))
108
109
109 rev = {}
110 rev = {}
110 if '.hgsubstate' in ctx:
111 if '.hgsubstate' in ctx:
111 try:
112 try:
112 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
113 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
113 l = l.lstrip()
114 l = l.lstrip()
114 if not l:
115 if not l:
115 continue
116 continue
116 try:
117 try:
117 revision, path = l.split(" ", 1)
118 revision, path = l.split(" ", 1)
118 except ValueError:
119 except ValueError:
119 raise error.Abort(_("invalid subrepository revision "
120 raise error.Abort(_("invalid subrepository revision "
120 "specifier in \'%s\' line %d")
121 "specifier in \'%s\' line %d")
121 % (repo.pathto('.hgsubstate'), (i + 1)))
122 % (repo.pathto('.hgsubstate'), (i + 1)))
122 rev[path] = revision
123 rev[path] = revision
123 except IOError as err:
124 except IOError as err:
124 if err.errno != errno.ENOENT:
125 if err.errno != errno.ENOENT:
125 raise
126 raise
126
127
127 def remap(src):
128 def remap(src):
128 for pattern, repl in p.items('subpaths'):
129 for pattern, repl in p.items('subpaths'):
129 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
130 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
130 # does a string decode.
131 # does a string decode.
131 repl = repl.encode('string-escape')
132 repl = repl.encode('string-escape')
132 # However, we still want to allow back references to go
133 # However, we still want to allow back references to go
133 # through unharmed, so we turn r'\\1' into r'\1'. Again,
134 # through unharmed, so we turn r'\\1' into r'\1'. Again,
134 # extra escapes are needed because re.sub string decodes.
135 # extra escapes are needed because re.sub string decodes.
135 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
136 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
136 try:
137 try:
137 src = re.sub(pattern, repl, src, 1)
138 src = re.sub(pattern, repl, src, 1)
138 except re.error as e:
139 except re.error as e:
139 raise error.Abort(_("bad subrepository pattern in %s: %s")
140 raise error.Abort(_("bad subrepository pattern in %s: %s")
140 % (p.source('subpaths', pattern), e))
141 % (p.source('subpaths', pattern), e))
141 return src
142 return src
142
143
143 state = {}
144 state = {}
144 for path, src in p[''].items():
145 for path, src in p[''].items():
145 kind = 'hg'
146 kind = 'hg'
146 if src.startswith('['):
147 if src.startswith('['):
147 if ']' not in src:
148 if ']' not in src:
148 raise error.Abort(_('missing ] in subrepo source'))
149 raise error.Abort(_('missing ] in subrepo source'))
149 kind, src = src.split(']', 1)
150 kind, src = src.split(']', 1)
150 kind = kind[1:]
151 kind = kind[1:]
151 src = src.lstrip() # strip any extra whitespace after ']'
152 src = src.lstrip() # strip any extra whitespace after ']'
152
153
153 if not util.url(src).isabs():
154 if not util.url(src).isabs():
154 parent = _abssource(repo, abort=False)
155 parent = _abssource(repo, abort=False)
155 if parent:
156 if parent:
156 parent = util.url(parent)
157 parent = util.url(parent)
157 parent.path = posixpath.join(parent.path or '', src)
158 parent.path = posixpath.join(parent.path or '', src)
158 parent.path = posixpath.normpath(parent.path)
159 parent.path = posixpath.normpath(parent.path)
159 joined = str(parent)
160 joined = str(parent)
160 # Remap the full joined path and use it if it changes,
161 # Remap the full joined path and use it if it changes,
161 # else remap the original source.
162 # else remap the original source.
162 remapped = remap(joined)
163 remapped = remap(joined)
163 if remapped == joined:
164 if remapped == joined:
164 src = remap(src)
165 src = remap(src)
165 else:
166 else:
166 src = remapped
167 src = remapped
167
168
168 src = remap(src)
169 src = remap(src)
169 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
170 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
170
171
171 return state
172 return state
172
173
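For orientation, a small invented example of what state() computes: each .hgsub entry contributes the source (with an optional '[kind]' prefix selecting the subrepo type), and .hgsubstate contributes the pinned revision, defaulting to an empty string when none is recorded yet. The paths, URLs and hash below are illustrative only, and the sources are absolute so no parent-path joining applies.

    # Hypothetical .hgsub / .hgsubstate contents and the mapping state()
    # would derive from them.
    hgsub = (b"vendor/lib = https://example.com/lib\n"
             b"tools/ext = [git]https://example.com/ext\n")
    hgsubstate = b"1f0dee641bb7258c56bd60e93edfa2405381c41e vendor/lib\n"

    # path -> (source from .hgsub, revision from .hgsubstate or '', kind)
    expected = {
        'vendor/lib': ('https://example.com/lib',
                       '1f0dee641bb7258c56bd60e93edfa2405381c41e', 'hg'),
        'tools/ext': ('https://example.com/ext', '', 'git'),
    }
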
173 def writestate(repo, state):
174 def writestate(repo, state):
174 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
175 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
175 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
176 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
176 if state[s][1] != nullstate[1]]
177 if state[s][1] != nullstate[1]]
177 repo.wwrite('.hgsubstate', ''.join(lines), '')
178 repo.wwrite('.hgsubstate', ''.join(lines), '')
178
179
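Conversely, writestate() serializes that mapping back into .hgsubstate, one 'revision path' line per committed subrepo, skipping entries still at the null revision (the empty string in nullstate). A minimal sketch with invented values:

    state = {
        'vendor/lib': ('https://example.com/lib',
                       '1f0dee641bb7258c56bd60e93edfa2405381c41e', 'hg'),
        'tools/ext': ('https://example.com/ext', '', 'git'),  # not committed yet
    }
    lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
             if state[s][1] != '']
    print(''.join(lines))
    # -> 1f0dee641bb7258c56bd60e93edfa2405381c41e vendor/lib
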
179 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
180 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
180 """delegated from merge.applyupdates: merging of .hgsubstate file
181 """delegated from merge.applyupdates: merging of .hgsubstate file
181 in working context, merging context and ancestor context"""
182 in working context, merging context and ancestor context"""
182 if mctx == actx: # backwards?
183 if mctx == actx: # backwards?
183 actx = wctx.p1()
184 actx = wctx.p1()
184 s1 = wctx.substate
185 s1 = wctx.substate
185 s2 = mctx.substate
186 s2 = mctx.substate
186 sa = actx.substate
187 sa = actx.substate
187 sm = {}
188 sm = {}
188
189
189 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
190 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
190
191
191 def debug(s, msg, r=""):
192 def debug(s, msg, r=""):
192 if r:
193 if r:
193 r = "%s:%s:%s" % r
194 r = "%s:%s:%s" % r
194 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
195 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
195
196
196 for s, l in sorted(s1.iteritems()):
197 for s, l in sorted(s1.iteritems()):
197 a = sa.get(s, nullstate)
198 a = sa.get(s, nullstate)
198 ld = l # local state with possible dirty flag for compares
199 ld = l # local state with possible dirty flag for compares
199 if wctx.sub(s).dirty():
200 if wctx.sub(s).dirty():
200 ld = (l[0], l[1] + "+")
201 ld = (l[0], l[1] + "+")
201 if wctx == actx: # overwrite
202 if wctx == actx: # overwrite
202 a = ld
203 a = ld
203
204
204 if s in s2:
205 if s in s2:
205 prompts = filemerge.partextras(labels)
206 prompts = filemerge.partextras(labels)
206 prompts['s'] = s
207 prompts['s'] = s
207 r = s2[s]
208 r = s2[s]
208 if ld == r or r == a: # no change or local is newer
209 if ld == r or r == a: # no change or local is newer
209 sm[s] = l
210 sm[s] = l
210 continue
211 continue
211 elif ld == a: # other side changed
212 elif ld == a: # other side changed
212 debug(s, "other changed, get", r)
213 debug(s, "other changed, get", r)
213 wctx.sub(s).get(r, overwrite)
214 wctx.sub(s).get(r, overwrite)
214 sm[s] = r
215 sm[s] = r
215 elif ld[0] != r[0]: # sources differ
216 elif ld[0] != r[0]: # sources differ
216 prompts['lo'] = l[0]
217 prompts['lo'] = l[0]
217 prompts['ro'] = r[0]
218 prompts['ro'] = r[0]
218 if repo.ui.promptchoice(
219 if repo.ui.promptchoice(
219 _(' subrepository sources for %(s)s differ\n'
220 _(' subrepository sources for %(s)s differ\n'
220 'use (l)ocal%(l)s source (%(lo)s)'
221 'use (l)ocal%(l)s source (%(lo)s)'
221 ' or (r)emote%(o)s source (%(ro)s)?'
222 ' or (r)emote%(o)s source (%(ro)s)?'
222 '$$ &Local $$ &Remote') % prompts, 0):
223 '$$ &Local $$ &Remote') % prompts, 0):
223 debug(s, "prompt changed, get", r)
224 debug(s, "prompt changed, get", r)
224 wctx.sub(s).get(r, overwrite)
225 wctx.sub(s).get(r, overwrite)
225 sm[s] = r
226 sm[s] = r
226 elif ld[1] == a[1]: # local side is unchanged
227 elif ld[1] == a[1]: # local side is unchanged
227 debug(s, "other side changed, get", r)
228 debug(s, "other side changed, get", r)
228 wctx.sub(s).get(r, overwrite)
229 wctx.sub(s).get(r, overwrite)
229 sm[s] = r
230 sm[s] = r
230 else:
231 else:
231 debug(s, "both sides changed")
232 debug(s, "both sides changed")
232 srepo = wctx.sub(s)
233 srepo = wctx.sub(s)
233 prompts['sl'] = srepo.shortid(l[1])
234 prompts['sl'] = srepo.shortid(l[1])
234 prompts['sr'] = srepo.shortid(r[1])
235 prompts['sr'] = srepo.shortid(r[1])
235 option = repo.ui.promptchoice(
236 option = repo.ui.promptchoice(
236 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
237 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
237 'remote revision: %(sr)s)\n'
238 'remote revision: %(sr)s)\n'
238 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
239 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
239 '$$ &Merge $$ &Local $$ &Remote')
240 '$$ &Merge $$ &Local $$ &Remote')
240 % prompts, 0)
241 % prompts, 0)
241 if option == 0:
242 if option == 0:
242 wctx.sub(s).merge(r)
243 wctx.sub(s).merge(r)
243 sm[s] = l
244 sm[s] = l
244 debug(s, "merge with", r)
245 debug(s, "merge with", r)
245 elif option == 1:
246 elif option == 1:
246 sm[s] = l
247 sm[s] = l
247 debug(s, "keep local subrepo revision", l)
248 debug(s, "keep local subrepo revision", l)
248 else:
249 else:
249 wctx.sub(s).get(r, overwrite)
250 wctx.sub(s).get(r, overwrite)
250 sm[s] = r
251 sm[s] = r
251 debug(s, "get remote subrepo revision", r)
252 debug(s, "get remote subrepo revision", r)
252 elif ld == a: # remote removed, local unchanged
253 elif ld == a: # remote removed, local unchanged
253 debug(s, "remote removed, remove")
254 debug(s, "remote removed, remove")
254 wctx.sub(s).remove()
255 wctx.sub(s).remove()
255 elif a == nullstate: # not present in remote or ancestor
256 elif a == nullstate: # not present in remote or ancestor
256 debug(s, "local added, keep")
257 debug(s, "local added, keep")
257 sm[s] = l
258 sm[s] = l
258 continue
259 continue
259 else:
260 else:
260 if repo.ui.promptchoice(
261 if repo.ui.promptchoice(
261 _(' local%(l)s changed subrepository %(s)s'
262 _(' local%(l)s changed subrepository %(s)s'
262 ' which remote%(o)s removed\n'
263 ' which remote%(o)s removed\n'
263 'use (c)hanged version or (d)elete?'
264 'use (c)hanged version or (d)elete?'
264 '$$ &Changed $$ &Delete') % prompts, 0):
265 '$$ &Changed $$ &Delete') % prompts, 0):
265 debug(s, "prompt remove")
266 debug(s, "prompt remove")
266 wctx.sub(s).remove()
267 wctx.sub(s).remove()
267
268
268 for s, r in sorted(s2.items()):
269 for s, r in sorted(s2.items()):
269 if s in s1:
270 if s in s1:
270 continue
271 continue
271 elif s not in sa:
272 elif s not in sa:
272 debug(s, "remote added, get", r)
273 debug(s, "remote added, get", r)
273 mctx.sub(s).get(r)
274 mctx.sub(s).get(r)
274 sm[s] = r
275 sm[s] = r
275 elif r != sa[s]:
276 elif r != sa[s]:
276 if repo.ui.promptchoice(
277 if repo.ui.promptchoice(
277 _(' remote%(o)s changed subrepository %(s)s'
278 _(' remote%(o)s changed subrepository %(s)s'
278 ' which local%(l)s removed\n'
279 ' which local%(l)s removed\n'
279 'use (c)hanged version or (d)elete?'
280 'use (c)hanged version or (d)elete?'
280 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
281 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
281 debug(s, "prompt recreate", r)
282 debug(s, "prompt recreate", r)
282 mctx.sub(s).get(r)
283 mctx.sub(s).get(r)
283 sm[s] = r
284 sm[s] = r
284
285
285 # record merged .hgsubstate
286 # record merged .hgsubstate
286 writestate(repo, sm)
287 writestate(repo, sm)
287 return sm
288 return sm
288
289
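The loop above interleaves these comparisons with prompting and debug output; purely as a compact restatement (not the implementation itself), the decision for a subrepo entry present in both the local and the remote state reduces to roughly the following, where each state is a (source, revision) pair, 'a' is the ancestor state and 'ld' may carry a '+'-suffixed revision when the working copy is dirty:

    def mergedecision(ld, r, a):
        # ld: local state, r: remote state, a: ancestor state
        if ld == r or r == a:       # no change, or local is newer
            return 'keep local'
        if ld == a:                 # only the other side changed
            return 'get remote'
        if ld[0] != r[0]:           # recorded sources differ
            return 'prompt: local or remote source'
        if ld[1] == a[1]:           # local revision unchanged
            return 'get remote'
        return 'prompt: merge, keep local or keep remote'

    # mergedecision(('src', 'a1'), ('src', 'b2'), ('src', 'a1')) -> 'get remote'
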
289 def _updateprompt(ui, sub, dirty, local, remote):
290 def _updateprompt(ui, sub, dirty, local, remote):
290 if dirty:
291 if dirty:
291 msg = (_(' subrepository sources for %s differ\n'
292 msg = (_(' subrepository sources for %s differ\n'
292 'use (l)ocal source (%s) or (r)emote source (%s)?'
293 'use (l)ocal source (%s) or (r)emote source (%s)?'
293 '$$ &Local $$ &Remote')
294 '$$ &Local $$ &Remote')
294 % (subrelpath(sub), local, remote))
295 % (subrelpath(sub), local, remote))
295 else:
296 else:
296 msg = (_(' subrepository sources for %s differ (in checked out '
297 msg = (_(' subrepository sources for %s differ (in checked out '
297 'version)\n'
298 'version)\n'
298 'use (l)ocal source (%s) or (r)emote source (%s)?'
299 'use (l)ocal source (%s) or (r)emote source (%s)?'
299 '$$ &Local $$ &Remote')
300 '$$ &Local $$ &Remote')
300 % (subrelpath(sub), local, remote))
301 % (subrelpath(sub), local, remote))
301 return ui.promptchoice(msg, 0)
302 return ui.promptchoice(msg, 0)
302
303
303 def reporelpath(repo):
304 def reporelpath(repo):
304 """return path to this (sub)repo as seen from outermost repo"""
305 """return path to this (sub)repo as seen from outermost repo"""
305 parent = repo
306 parent = repo
306 while util.safehasattr(parent, '_subparent'):
307 while util.safehasattr(parent, '_subparent'):
307 parent = parent._subparent
308 parent = parent._subparent
308 return repo.root[len(pathutil.normasprefix(parent.root)):]
309 return repo.root[len(pathutil.normasprefix(parent.root)):]
309
310
310 def subrelpath(sub):
311 def subrelpath(sub):
311 """return path to this subrepo as seen from outermost repo"""
312 """return path to this subrepo as seen from outermost repo"""
312 return sub._relpath
313 return sub._relpath
313
314
314 def _abssource(repo, push=False, abort=True):
315 def _abssource(repo, push=False, abort=True):
315 """return pull/push path of repo - either based on parent repo .hgsub info
316 """return pull/push path of repo - either based on parent repo .hgsub info
316 or on the top repo config. Abort or return None if no source found."""
317 or on the top repo config. Abort or return None if no source found."""
317 if util.safehasattr(repo, '_subparent'):
318 if util.safehasattr(repo, '_subparent'):
318 source = util.url(repo._subsource)
319 source = util.url(repo._subsource)
319 if source.isabs():
320 if source.isabs():
320 return str(source)
321 return str(source)
321 source.path = posixpath.normpath(source.path)
322 source.path = posixpath.normpath(source.path)
322 parent = _abssource(repo._subparent, push, abort=False)
323 parent = _abssource(repo._subparent, push, abort=False)
323 if parent:
324 if parent:
324 parent = util.url(util.pconvert(parent))
325 parent = util.url(util.pconvert(parent))
325 parent.path = posixpath.join(parent.path or '', source.path)
326 parent.path = posixpath.join(parent.path or '', source.path)
326 parent.path = posixpath.normpath(parent.path)
327 parent.path = posixpath.normpath(parent.path)
327 return str(parent)
328 return str(parent)
328 else: # recursion reached top repo
329 else: # recursion reached top repo
329 if util.safehasattr(repo, '_subtoppath'):
330 if util.safehasattr(repo, '_subtoppath'):
330 return repo._subtoppath
331 return repo._subtoppath
331 if push and repo.ui.config('paths', 'default-push'):
332 if push and repo.ui.config('paths', 'default-push'):
332 return repo.ui.config('paths', 'default-push')
333 return repo.ui.config('paths', 'default-push')
333 if repo.ui.config('paths', 'default'):
334 if repo.ui.config('paths', 'default'):
334 return repo.ui.config('paths', 'default')
335 return repo.ui.config('paths', 'default')
335 if repo.shared():
336 if repo.shared():
336 # chop off the .hg component to get the default path form
337 # chop off the .hg component to get the default path form
337 return os.path.dirname(repo.sharedpath)
338 return os.path.dirname(repo.sharedpath)
338 if abort:
339 if abort:
339 raise error.Abort(_("default path for subrepository not found"))
340 raise error.Abort(_("default path for subrepository not found"))
340
341
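While recursing, the relative subrepo source is joined onto the parent URL's path component and normalized; only at the top repo does it fall back to paths.default-push (for pushes), paths.default, or the share source. A tiny illustration of the path arithmetic with invented values:

    import posixpath

    # _abssource() joins the relative source onto the parent's path component
    # (scheme and host are carried separately by util.url).
    parentpath = '/hg/main'
    subsource = '../shared/lib'
    print(posixpath.normpath(posixpath.join(parentpath, subsource)))
    # -> /hg/shared/lib
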
341 def _sanitize(ui, vfs, ignore):
342 def _sanitize(ui, vfs, ignore):
342 for dirname, dirs, names in vfs.walk():
343 for dirname, dirs, names in vfs.walk():
343 for i, d in enumerate(dirs):
344 for i, d in enumerate(dirs):
344 if d.lower() == ignore:
345 if d.lower() == ignore:
345 del dirs[i]
346 del dirs[i]
346 break
347 break
347 if vfs.basename(dirname).lower() != '.hg':
348 if vfs.basename(dirname).lower() != '.hg':
348 continue
349 continue
349 for f in names:
350 for f in names:
350 if f.lower() == 'hgrc':
351 if f.lower() == 'hgrc':
351 ui.warn(_("warning: removing potentially hostile 'hgrc' "
352 ui.warn(_("warning: removing potentially hostile 'hgrc' "
352 "in '%s'\n") % vfs.join(dirname))
353 "in '%s'\n") % vfs.join(dirname))
353 vfs.unlink(vfs.reljoin(dirname, f))
354 vfs.unlink(vfs.reljoin(dirname, f))
354
355
355 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
356 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
356 """return instance of the right subrepo class for subrepo in path"""
357 """return instance of the right subrepo class for subrepo in path"""
357 # subrepo inherently violates our import layering rules
358 # subrepo inherently violates our import layering rules
358 # because it wants to make repo objects from deep inside the stack
359 # because it wants to make repo objects from deep inside the stack
359 # so we manually delay the circular imports to not break
360 # so we manually delay the circular imports to not break
360 # scripts that don't use our demand-loading
361 # scripts that don't use our demand-loading
361 global hg
362 global hg
362 from . import hg as h
363 from . import hg as h
363 hg = h
364 hg = h
364
365
365 pathutil.pathauditor(ctx.repo().root)(path)
366 pathutil.pathauditor(ctx.repo().root)(path)
366 state = ctx.substate[path]
367 state = ctx.substate[path]
367 if state[2] not in types:
368 if state[2] not in types:
368 raise error.Abort(_('unknown subrepo type %s') % state[2])
369 raise error.Abort(_('unknown subrepo type %s') % state[2])
369 if allowwdir:
370 if allowwdir:
370 state = (state[0], ctx.subrev(path), state[2])
371 state = (state[0], ctx.subrev(path), state[2])
371 return types[state[2]](ctx, path, state[:2], allowcreate)
372 return types[state[2]](ctx, path, state[:2], allowcreate)
372
373
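A hedged sketch of how a caller obtains a concrete subrepo object through this factory; the repository path and subrepo name are invented, the working copy is assumed to carry a matching .hgsub entry, and constructing a bare ui like this is a simplification of what real commands do:

    from mercurial import hg, subrepo, ui as uimod

    repo = hg.repository(uimod.ui(), '/path/to/parent')
    ctx = repo['.']
    sub = subrepo.subrepo(ctx, 'vendor/lib')   # e.g. an hgsubrepo instance
    if sub.dirty():
        print(sub.dirtyreason())
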
373 def nullsubrepo(ctx, path, pctx):
374 def nullsubrepo(ctx, path, pctx):
374 """return an empty subrepo in pctx for the extant subrepo in ctx"""
375 """return an empty subrepo in pctx for the extant subrepo in ctx"""
375 # subrepo inherently violates our import layering rules
376 # subrepo inherently violates our import layering rules
376 # because it wants to make repo objects from deep inside the stack
377 # because it wants to make repo objects from deep inside the stack
377 # so we manually delay the circular imports to not break
378 # so we manually delay the circular imports to not break
378 # scripts that don't use our demand-loading
379 # scripts that don't use our demand-loading
379 global hg
380 global hg
380 from . import hg as h
381 from . import hg as h
381 hg = h
382 hg = h
382
383
383 pathutil.pathauditor(ctx.repo().root)(path)
384 pathutil.pathauditor(ctx.repo().root)(path)
384 state = ctx.substate[path]
385 state = ctx.substate[path]
385 if state[2] not in types:
386 if state[2] not in types:
386 raise error.Abort(_('unknown subrepo type %s') % state[2])
387 raise error.Abort(_('unknown subrepo type %s') % state[2])
387 subrev = ''
388 subrev = ''
388 if state[2] == 'hg':
389 if state[2] == 'hg':
389 subrev = "0" * 40
390 subrev = "0" * 40
390 return types[state[2]](pctx, path, (state[0], subrev), True)
391 return types[state[2]](pctx, path, (state[0], subrev), True)
391
392
392 def newcommitphase(ui, ctx):
393 def newcommitphase(ui, ctx):
393 commitphase = phases.newcommitphase(ui)
394 commitphase = phases.newcommitphase(ui)
394 substate = getattr(ctx, "substate", None)
395 substate = getattr(ctx, "substate", None)
395 if not substate:
396 if not substate:
396 return commitphase
397 return commitphase
397 check = ui.config('phases', 'checksubrepos', 'follow')
398 check = ui.config('phases', 'checksubrepos', 'follow')
398 if check not in ('ignore', 'follow', 'abort'):
399 if check not in ('ignore', 'follow', 'abort'):
399 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
400 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
400 % (check))
401 % (check))
401 if check == 'ignore':
402 if check == 'ignore':
402 return commitphase
403 return commitphase
403 maxphase = phases.public
404 maxphase = phases.public
404 maxsub = None
405 maxsub = None
405 for s in sorted(substate):
406 for s in sorted(substate):
406 sub = ctx.sub(s)
407 sub = ctx.sub(s)
407 subphase = sub.phase(substate[s][1])
408 subphase = sub.phase(substate[s][1])
408 if maxphase < subphase:
409 if maxphase < subphase:
409 maxphase = subphase
410 maxphase = subphase
410 maxsub = s
411 maxsub = s
411 if commitphase < maxphase:
412 if commitphase < maxphase:
412 if check == 'abort':
413 if check == 'abort':
413 raise error.Abort(_("can't commit in %s phase"
414 raise error.Abort(_("can't commit in %s phase"
414 " conflicting %s from subrepository %s") %
415 " conflicting %s from subrepository %s") %
415 (phases.phasenames[commitphase],
416 (phases.phasenames[commitphase],
416 phases.phasenames[maxphase], maxsub))
417 phases.phasenames[maxphase], maxsub))
417 ui.warn(_("warning: changes are committed in"
418 ui.warn(_("warning: changes are committed in"
418 " %s phase from subrepository %s\n") %
419 " %s phase from subrepository %s\n") %
419 (phases.phasenames[maxphase], maxsub))
420 (phases.phasenames[maxphase], maxsub))
420 return maxphase
421 return maxphase
421 return commitphase
422 return commitphase
422
423
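As the validation above shows, this behaviour hangs off a single configuration knob read with 'follow' as the default; only 'ignore', 'follow' and 'abort' are accepted. An example user configuration:

    [phases]
    checksubrepos = abort
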
423 # subrepo classes need to implement the following abstract class:
424 # subrepo classes need to implement the following abstract class:
424
425
425 class abstractsubrepo(object):
426 class abstractsubrepo(object):
426
427
427 def __init__(self, ctx, path):
428 def __init__(self, ctx, path):
428 """Initialize abstractsubrepo part
429 """Initialize abstractsubrepo part
429
430
430 ``ctx`` is the context referring to this subrepository in the
431 ``ctx`` is the context referring to this subrepository in the
431 parent repository.
432 parent repository.
432
433
433 ``path`` is the path to this subrepository as seen from
434 ``path`` is the path to this subrepository as seen from
434 innermost repository.
435 innermost repository.
435 """
436 """
436 self.ui = ctx.repo().ui
437 self.ui = ctx.repo().ui
437 self._ctx = ctx
438 self._ctx = ctx
438 self._path = path
439 self._path = path
439
440
440 def storeclean(self, path):
441 def storeclean(self, path):
441 """
442 """
442 returns true if the repository has not changed since it was last
443 returns true if the repository has not changed since it was last
443 cloned from or pushed to a given repository.
444 cloned from or pushed to a given repository.
444 """
445 """
445 return False
446 return False
446
447
447 def dirty(self, ignoreupdate=False):
448 def dirty(self, ignoreupdate=False):
448 """returns true if the dirstate of the subrepo is dirty or does not
449 """returns true if the dirstate of the subrepo is dirty or does not
449 match current stored state. If ignoreupdate is true, only check
450 match current stored state. If ignoreupdate is true, only check
450 whether the subrepo has uncommitted changes in its dirstate.
451 whether the subrepo has uncommitted changes in its dirstate.
451 """
452 """
452 raise NotImplementedError
453 raise NotImplementedError
453
454
454 def dirtyreason(self, ignoreupdate=False):
455 def dirtyreason(self, ignoreupdate=False):
455 """return reason string if it is ``dirty()``
456 """return reason string if it is ``dirty()``
456
457
457 Returned string should have enough information for the message
458 Returned string should have enough information for the message
458 of exception.
459 of exception.
459
460
460 Otherwise, this returns None.
461 Otherwise, this returns None.
461 """
462 """
462 if self.dirty(ignoreupdate=ignoreupdate):
463 if self.dirty(ignoreupdate=ignoreupdate):
463 return _("uncommitted changes in subrepository '%s'"
464 return _("uncommitted changes in subrepository '%s'"
464 ) % subrelpath(self)
465 ) % subrelpath(self)
465
466
466 def bailifchanged(self, ignoreupdate=False):
467 def bailifchanged(self, ignoreupdate=False):
467 """raise Abort if subrepository is ``dirty()``
468 """raise Abort if subrepository is ``dirty()``
468 """
469 """
469 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
470 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
470 if dirtyreason:
471 if dirtyreason:
471 raise error.Abort(dirtyreason)
472 raise error.Abort(dirtyreason)
472
473
473 def basestate(self):
474 def basestate(self):
474 """current working directory base state, disregarding .hgsubstate
475 """current working directory base state, disregarding .hgsubstate
475 state and working directory modifications"""
476 state and working directory modifications"""
476 raise NotImplementedError
477 raise NotImplementedError
477
478
478 def checknested(self, path):
479 def checknested(self, path):
479 """check if path is a subrepository within this repository"""
480 """check if path is a subrepository within this repository"""
480 return False
481 return False
481
482
482 def commit(self, text, user, date):
483 def commit(self, text, user, date):
483 """commit the current changes to the subrepo with the given
484 """commit the current changes to the subrepo with the given
484 log message. Use given user and date if possible. Return the
485 log message. Use given user and date if possible. Return the
485 new state of the subrepo.
486 new state of the subrepo.
486 """
487 """
487 raise NotImplementedError
488 raise NotImplementedError
488
489
489 def phase(self, state):
490 def phase(self, state):
490 """returns phase of specified state in the subrepository.
491 """returns phase of specified state in the subrepository.
491 """
492 """
492 return phases.public
493 return phases.public
493
494
494 def remove(self):
495 def remove(self):
495 """remove the subrepo
496 """remove the subrepo
496
497
497 (should verify the dirstate is not dirty first)
498 (should verify the dirstate is not dirty first)
498 """
499 """
499 raise NotImplementedError
500 raise NotImplementedError
500
501
501 def get(self, state, overwrite=False):
502 def get(self, state, overwrite=False):
502 """run whatever commands are needed to put the subrepo into
503 """run whatever commands are needed to put the subrepo into
503 this state
504 this state
504 """
505 """
505 raise NotImplementedError
506 raise NotImplementedError
506
507
507 def merge(self, state):
508 def merge(self, state):
508 """merge currently-saved state with the new state."""
509 """merge currently-saved state with the new state."""
509 raise NotImplementedError
510 raise NotImplementedError
510
511
511 def push(self, opts):
512 def push(self, opts):
512 """perform whatever action is analogous to 'hg push'
513 """perform whatever action is analogous to 'hg push'
513
514
514 This may be a no-op on some systems.
515 This may be a no-op on some systems.
515 """
516 """
516 raise NotImplementedError
517 raise NotImplementedError
517
518
518 def add(self, ui, match, prefix, explicitonly, **opts):
519 def add(self, ui, match, prefix, explicitonly, **opts):
519 return []
520 return []
520
521
521 def addremove(self, matcher, prefix, opts, dry_run, similarity):
522 def addremove(self, matcher, prefix, opts, dry_run, similarity):
522 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
523 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
523 return 1
524 return 1
524
525
525 def cat(self, match, prefix, **opts):
526 def cat(self, match, prefix, **opts):
526 return 1
527 return 1
527
528
528 def status(self, rev2, **opts):
529 def status(self, rev2, **opts):
529 return scmutil.status([], [], [], [], [], [], [])
530 return scmutil.status([], [], [], [], [], [], [])
530
531
531 def diff(self, ui, diffopts, node2, match, prefix, **opts):
532 def diff(self, ui, diffopts, node2, match, prefix, **opts):
532 pass
533 pass
533
534
534 def outgoing(self, ui, dest, opts):
535 def outgoing(self, ui, dest, opts):
535 return 1
536 return 1
536
537
537 def incoming(self, ui, source, opts):
538 def incoming(self, ui, source, opts):
538 return 1
539 return 1
539
540
540 def files(self):
541 def files(self):
541 """return filename iterator"""
542 """return filename iterator"""
542 raise NotImplementedError
543 raise NotImplementedError
543
544
544 def filedata(self, name):
545 def filedata(self, name):
545 """return file data"""
546 """return file data"""
546 raise NotImplementedError
547 raise NotImplementedError
547
548
548 def fileflags(self, name):
549 def fileflags(self, name):
549 """return file flags"""
550 """return file flags"""
550 return ''
551 return ''
551
552
552 def getfileset(self, expr):
553 def getfileset(self, expr):
553 """Resolve the fileset expression for this repo"""
554 """Resolve the fileset expression for this repo"""
554 return set()
555 return set()
555
556
556 def printfiles(self, ui, m, fm, fmt, subrepos):
557 def printfiles(self, ui, m, fm, fmt, subrepos):
557 """handle the files command for this subrepo"""
558 """handle the files command for this subrepo"""
558 return 1
559 return 1
559
560
560 def archive(self, archiver, prefix, match=None):
561 def archive(self, archiver, prefix, match=None):
561 if match is not None:
562 if match is not None:
562 files = [f for f in self.files() if match(f)]
563 files = [f for f in self.files() if match(f)]
563 else:
564 else:
564 files = self.files()
565 files = self.files()
565 total = len(files)
566 total = len(files)
566 relpath = subrelpath(self)
567 relpath = subrelpath(self)
567 self.ui.progress(_('archiving (%s)') % relpath, 0,
568 self.ui.progress(_('archiving (%s)') % relpath, 0,
568 unit=_('files'), total=total)
569 unit=_('files'), total=total)
569 for i, name in enumerate(files):
570 for i, name in enumerate(files):
570 flags = self.fileflags(name)
571 flags = self.fileflags(name)
571 mode = 'x' in flags and 0o755 or 0o644
572 mode = 'x' in flags and 0o755 or 0o644
572 symlink = 'l' in flags
573 symlink = 'l' in flags
573 archiver.addfile(prefix + self._path + '/' + name,
574 archiver.addfile(prefix + self._path + '/' + name,
574 mode, symlink, self.filedata(name))
575 mode, symlink, self.filedata(name))
575 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
576 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
576 unit=_('files'), total=total)
577 unit=_('files'), total=total)
577 self.ui.progress(_('archiving (%s)') % relpath, None)
578 self.ui.progress(_('archiving (%s)') % relpath, None)
578 return total
579 return total
579
580
580 def walk(self, match):
581 def walk(self, match):
581 '''
582 '''
582 walk recursively through the directory tree, finding all files
583 walk recursively through the directory tree, finding all files
583 matched by the match function
584 matched by the match function
584 '''
585 '''
585 pass
586 pass
586
587
587 def forget(self, match, prefix):
588 def forget(self, match, prefix):
588 return ([], [])
589 return ([], [])
589
590
590 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
591 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
591 """remove the matched files from the subrepository and the filesystem,
592 """remove the matched files from the subrepository and the filesystem,
592 possibly by force and/or after the file has been removed from the
593 possibly by force and/or after the file has been removed from the
593 filesystem. Return 0 on success, 1 on any warning.
594 filesystem. Return 0 on success, 1 on any warning.
594 """
595 """
595 warnings.append(_("warning: removefiles not implemented (%s)")
596 warnings.append(_("warning: removefiles not implemented (%s)")
596 % self._path)
597 % self._path)
597 return 1
598 return 1
598
599
599 def revert(self, substate, *pats, **opts):
600 def revert(self, substate, *pats, **opts):
600 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
601 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
601 % (substate[0], substate[2]))
602 % (substate[0], substate[2]))
602 return []
603 return []
603
604
604 def shortid(self, revid):
605 def shortid(self, revid):
605 return revid
606 return revid
606
607
607 def verify(self):
608 def verify(self):
608 '''verify the integrity of the repository. Return 0 on success or
609 '''verify the integrity of the repository. Return 0 on success or
609 warning, 1 on any error.
610 warning, 1 on any error.
610 '''
611 '''
611 return 0
612 return 0
612
613
613 @propertycache
614 @propertycache
614 def wvfs(self):
615 def wvfs(self):
615 """return vfs to access the working directory of this subrepository
616 """return vfs to access the working directory of this subrepository
616 """
617 """
617 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
618 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
618
619
619 @propertycache
620 @propertycache
620 def _relpath(self):
621 def _relpath(self):
621 """return path to this subrepository as seen from outermost repository
622 """return path to this subrepository as seen from outermost repository
622 """
623 """
623 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
624 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
624
625
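To make the abstract interface above concrete, here is a hypothetical, deliberately minimal subrepo type; it is only a sketch of the method signatures documented in abstractsubrepo, and a real implementation would also have to be registered in this module's 'types' mapping so the subrepo() factory can instantiate it:

    from mercurial import subrepo

    class readonlysubrepo(subrepo.abstractsubrepo):
        """Hypothetical subrepo type that is never dirty and has no
        working copy of its own."""

        def __init__(self, ctx, path, state, allowcreate):
            super(readonlysubrepo, self).__init__(ctx, path)
            self._state = state

        def dirty(self, ignoreupdate=False):
            return False              # nothing local can ever change

        def basestate(self):
            return self._state[1]     # the revision pinned in .hgsubstate

        def commit(self, text, user, date):
            return self._state[1]     # nothing new to record

        def remove(self):
            pass                      # nothing on disk to clean up

        def get(self, state, overwrite=False):
            pass                      # a real type would check out state[1]

        def merge(self, state):
            pass

        def push(self, opts):
            return True               # nothing to push, report success
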
625 class hgsubrepo(abstractsubrepo):
626 class hgsubrepo(abstractsubrepo):
626 def __init__(self, ctx, path, state, allowcreate):
627 def __init__(self, ctx, path, state, allowcreate):
627 super(hgsubrepo, self).__init__(ctx, path)
628 super(hgsubrepo, self).__init__(ctx, path)
628 self._state = state
629 self._state = state
629 r = ctx.repo()
630 r = ctx.repo()
630 root = r.wjoin(path)
631 root = r.wjoin(path)
631 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
632 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
632 self._repo = hg.repository(r.baseui, root, create=create)
633 self._repo = hg.repository(r.baseui, root, create=create)
633
634
634 # Propagate the parent's --hidden option
635 # Propagate the parent's --hidden option
635 if r is r.unfiltered():
636 if r is r.unfiltered():
636 self._repo = self._repo.unfiltered()
637 self._repo = self._repo.unfiltered()
637
638
638 self.ui = self._repo.ui
639 self.ui = self._repo.ui
639 for s, k in [('ui', 'commitsubrepos')]:
640 for s, k in [('ui', 'commitsubrepos')]:
640 v = r.ui.config(s, k)
641 v = r.ui.config(s, k)
641 if v:
642 if v:
642 self.ui.setconfig(s, k, v, 'subrepo')
643 self.ui.setconfig(s, k, v, 'subrepo')
643 # internal config: ui._usedassubrepo
644 # internal config: ui._usedassubrepo
644 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
645 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
645 self._initrepo(r, state[0], create)
646 self._initrepo(r, state[0], create)
646
647
647 def storeclean(self, path):
648 def storeclean(self, path):
648 with self._repo.lock():
649 with self._repo.lock():
649 return self._storeclean(path)
650 return self._storeclean(path)
650
651
651 def _storeclean(self, path):
652 def _storeclean(self, path):
652 clean = True
653 clean = True
653 itercache = self._calcstorehash(path)
654 itercache = self._calcstorehash(path)
654 for filehash in self._readstorehashcache(path):
655 for filehash in self._readstorehashcache(path):
655 if filehash != next(itercache, None):
656 if filehash != next(itercache, None):
656 clean = False
657 clean = False
657 break
658 break
658 if clean:
659 if clean:
659 # if not empty:
660 # if not empty:
660 # the cached and current pull states have a different size
661 # the cached and current pull states have a different size
661 clean = next(itercache, None) is None
662 clean = next(itercache, None) is None
662 return clean
663 return clean
663
664
664 def _calcstorehash(self, remotepath):
665 def _calcstorehash(self, remotepath):
665 '''calculate a unique "store hash"
666 '''calculate a unique "store hash"
666
667
667 This method is used to detect when there are changes that may
668 This method is used to detect when there are changes that may
668 require a push to a given remote path.'''
669 require a push to a given remote path.'''
669 # sort the files that will be hashed in increasing (likely) file size
670 # sort the files that will be hashed in increasing (likely) file size
670 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
671 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
671 yield '# %s\n' % _expandedabspath(remotepath)
672 yield '# %s\n' % _expandedabspath(remotepath)
672 vfs = self._repo.vfs
673 vfs = self._repo.vfs
673 for relname in filelist:
674 for relname in filelist:
674 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
675 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
675 yield '%s = %s\n' % (relname, filehash)
676 yield '%s = %s\n' % (relname, filehash)
676
677
677 @propertycache
678 @propertycache
678 def _cachestorehashvfs(self):
679 def _cachestorehashvfs(self):
679 return scmutil.vfs(self._repo.join('cache/storehash'))
680 return scmutil.vfs(self._repo.join('cache/storehash'))
680
681
681 def _readstorehashcache(self, remotepath):
682 def _readstorehashcache(self, remotepath):
682 '''read the store hash cache for a given remote repository'''
683 '''read the store hash cache for a given remote repository'''
683 cachefile = _getstorehashcachename(remotepath)
684 cachefile = _getstorehashcachename(remotepath)
684 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
685 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
685
686
686 def _cachestorehash(self, remotepath):
687 def _cachestorehash(self, remotepath):
687 '''cache the current store hash
688 '''cache the current store hash
688
689
689 Each remote repo requires its own store hash cache, because a subrepo
690 Each remote repo requires its own store hash cache, because a subrepo
690 store may be "clean" versus a given remote repo, but not versus another
691 store may be "clean" versus a given remote repo, but not versus another
691 '''
692 '''
692 cachefile = _getstorehashcachename(remotepath)
693 cachefile = _getstorehashcachename(remotepath)
693 with self._repo.lock():
694 with self._repo.lock():
694 storehash = list(self._calcstorehash(remotepath))
695 storehash = list(self._calcstorehash(remotepath))
695 vfs = self._cachestorehashvfs
696 vfs = self._cachestorehashvfs
696 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
697 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
697
698
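The cache file written above holds one header line naming the expanded remote path, followed by one 'name = sha1' line per tracked store file (bookmarks, store/phaseroots, store/00changelog.i). For illustration, the digest of an empty file, computed the same way _calcstorehash() does:

    import hashlib

    # sha1 of the raw file contents, here an empty bookmarks file
    print('bookmarks = %s' % hashlib.sha1(b'').hexdigest())
    # -> bookmarks = da39a3ee5e6b4b0d3255bfef95601890afd80709
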
698 def _getctx(self):
699 def _getctx(self):
699 '''fetch the context for this subrepo revision, possibly a workingctx
700 '''fetch the context for this subrepo revision, possibly a workingctx
700 '''
701 '''
701 if self._ctx.rev() is None:
702 if self._ctx.rev() is None:
702 return self._repo[None] # workingctx if parent is workingctx
703 return self._repo[None] # workingctx if parent is workingctx
703 else:
704 else:
704 rev = self._state[1]
705 rev = self._state[1]
705 return self._repo[rev]
706 return self._repo[rev]
706
707
707 @annotatesubrepoerror
708 @annotatesubrepoerror
708 def _initrepo(self, parentrepo, source, create):
709 def _initrepo(self, parentrepo, source, create):
709 self._repo._subparent = parentrepo
710 self._repo._subparent = parentrepo
710 self._repo._subsource = source
711 self._repo._subsource = source
711
712
712 if create:
713 if create:
713 lines = ['[paths]\n']
714 lines = ['[paths]\n']
714
715
715 def addpathconfig(key, value):
716 def addpathconfig(key, value):
716 if value:
717 if value:
717 lines.append('%s = %s\n' % (key, value))
718 lines.append('%s = %s\n' % (key, value))
718 self.ui.setconfig('paths', key, value, 'subrepo')
719 self.ui.setconfig('paths', key, value, 'subrepo')
719
720
720 defpath = _abssource(self._repo, abort=False)
721 defpath = _abssource(self._repo, abort=False)
721 defpushpath = _abssource(self._repo, True, abort=False)
722 defpushpath = _abssource(self._repo, True, abort=False)
722 addpathconfig('default', defpath)
723 addpathconfig('default', defpath)
723 if defpath != defpushpath:
724 if defpath != defpushpath:
724 addpathconfig('default-push', defpushpath)
725 addpathconfig('default-push', defpushpath)
725
726
726 fp = self._repo.vfs("hgrc", "w", text=True)
727 fp = self._repo.vfs("hgrc", "w", text=True)
727 try:
728 try:
728 fp.write(''.join(lines))
729 fp.write(''.join(lines))
729 finally:
730 finally:
730 fp.close()
731 fp.close()
731
732
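For a freshly created subrepo clone, the hgrc written above ends up containing just a [paths] section pointing back at the parent-derived default (with default-push added only when it differs). A sketch with invented URLs:

    lines = ['[paths]\n']
    lines.append('default = %s\n' % 'https://example.com/main/vendor/lib')
    lines.append('default-push = %s\n' % 'ssh://hg@example.com/main/vendor/lib')
    print(''.join(lines))
    # [paths]
    # default = https://example.com/main/vendor/lib
    # default-push = ssh://hg@example.com/main/vendor/lib
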
732 @annotatesubrepoerror
733 @annotatesubrepoerror
733 def add(self, ui, match, prefix, explicitonly, **opts):
734 def add(self, ui, match, prefix, explicitonly, **opts):
734 return cmdutil.add(ui, self._repo, match,
735 return cmdutil.add(ui, self._repo, match,
735 self.wvfs.reljoin(prefix, self._path),
736 self.wvfs.reljoin(prefix, self._path),
736 explicitonly, **opts)
737 explicitonly, **opts)
737
738
738 @annotatesubrepoerror
739 @annotatesubrepoerror
739 def addremove(self, m, prefix, opts, dry_run, similarity):
740 def addremove(self, m, prefix, opts, dry_run, similarity):
740 # In the same way as sub directories are processed, once in a subrepo,
741 # In the same way as sub directories are processed, once in a subrepo,
741 # always enter any of its subrepos. Don't corrupt the options that will
742 # always enter any of its subrepos. Don't corrupt the options that will
742 # be used to process sibling subrepos however.
743 # be used to process sibling subrepos however.
743 opts = copy.copy(opts)
744 opts = copy.copy(opts)
744 opts['subrepos'] = True
745 opts['subrepos'] = True
745 return scmutil.addremove(self._repo, m,
746 return scmutil.addremove(self._repo, m,
746 self.wvfs.reljoin(prefix, self._path), opts,
747 self.wvfs.reljoin(prefix, self._path), opts,
747 dry_run, similarity)
748 dry_run, similarity)
748
749
749 @annotatesubrepoerror
750 @annotatesubrepoerror
750 def cat(self, match, prefix, **opts):
751 def cat(self, match, prefix, **opts):
751 rev = self._state[1]
752 rev = self._state[1]
752 ctx = self._repo[rev]
753 ctx = self._repo[rev]
753 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
754 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
754
755
755 @annotatesubrepoerror
756 @annotatesubrepoerror
756 def status(self, rev2, **opts):
757 def status(self, rev2, **opts):
757 try:
758 try:
758 rev1 = self._state[1]
759 rev1 = self._state[1]
759 ctx1 = self._repo[rev1]
760 ctx1 = self._repo[rev1]
760 ctx2 = self._repo[rev2]
761 ctx2 = self._repo[rev2]
761 return self._repo.status(ctx1, ctx2, **opts)
762 return self._repo.status(ctx1, ctx2, **opts)
762 except error.RepoLookupError as inst:
763 except error.RepoLookupError as inst:
763 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
764 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
764 % (inst, subrelpath(self)))
765 % (inst, subrelpath(self)))
765 return scmutil.status([], [], [], [], [], [], [])
766 return scmutil.status([], [], [], [], [], [], [])
766
767
767 @annotatesubrepoerror
768 @annotatesubrepoerror
768 def diff(self, ui, diffopts, node2, match, prefix, **opts):
769 def diff(self, ui, diffopts, node2, match, prefix, **opts):
769 try:
770 try:
770 node1 = node.bin(self._state[1])
771 node1 = node.bin(self._state[1])
771 # We currently expect node2 to come from substate and be
772 # We currently expect node2 to come from substate and be
772 # in hex format
773 # in hex format
773 if node2 is not None:
774 if node2 is not None:
774 node2 = node.bin(node2)
775 node2 = node.bin(node2)
775 cmdutil.diffordiffstat(ui, self._repo, diffopts,
776 cmdutil.diffordiffstat(ui, self._repo, diffopts,
776 node1, node2, match,
777 node1, node2, match,
777 prefix=posixpath.join(prefix, self._path),
778 prefix=posixpath.join(prefix, self._path),
778 listsubrepos=True, **opts)
779 listsubrepos=True, **opts)
779 except error.RepoLookupError as inst:
780 except error.RepoLookupError as inst:
780 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
781 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
781 % (inst, subrelpath(self)))
782 % (inst, subrelpath(self)))
782
783
783 @annotatesubrepoerror
784 @annotatesubrepoerror
784 def archive(self, archiver, prefix, match=None):
785 def archive(self, archiver, prefix, match=None):
785 self._get(self._state + ('hg',))
786 self._get(self._state + ('hg',))
786 total = abstractsubrepo.archive(self, archiver, prefix, match)
787 total = abstractsubrepo.archive(self, archiver, prefix, match)
787 rev = self._state[1]
788 rev = self._state[1]
788 ctx = self._repo[rev]
789 ctx = self._repo[rev]
789 for subpath in ctx.substate:
790 for subpath in ctx.substate:
790 s = subrepo(ctx, subpath, True)
791 s = subrepo(ctx, subpath, True)
791 submatch = matchmod.subdirmatcher(subpath, match)
792 submatch = matchmod.subdirmatcher(subpath, match)
792 total += s.archive(archiver, prefix + self._path + '/', submatch)
793 total += s.archive(archiver, prefix + self._path + '/', submatch)
793 return total
794 return total
794
795
795 @annotatesubrepoerror
796 @annotatesubrepoerror
796 def dirty(self, ignoreupdate=False):
797 def dirty(self, ignoreupdate=False):
797 r = self._state[1]
798 r = self._state[1]
798 if r == '' and not ignoreupdate: # no state recorded
799 if r == '' and not ignoreupdate: # no state recorded
799 return True
800 return True
800 w = self._repo[None]
801 w = self._repo[None]
801 if r != w.p1().hex() and not ignoreupdate:
802 if r != w.p1().hex() and not ignoreupdate:
802 # different version checked out
803 # different version checked out
803 return True
804 return True
804 return w.dirty() # working directory changed
805 return w.dirty() # working directory changed
805
806
806 def basestate(self):
807 def basestate(self):
807 return self._repo['.'].hex()
808 return self._repo['.'].hex()
808
809
809 def checknested(self, path):
810 def checknested(self, path):
810 return self._repo._checknested(self._repo.wjoin(path))
811 return self._repo._checknested(self._repo.wjoin(path))
811
812
812 @annotatesubrepoerror
813 @annotatesubrepoerror
813 def commit(self, text, user, date):
814 def commit(self, text, user, date):
814 # don't bother committing in the subrepo if it's only been
815 # don't bother committing in the subrepo if it's only been
815 # updated
816 # updated
816 if not self.dirty(True):
817 if not self.dirty(True):
817 return self._repo['.'].hex()
818 return self._repo['.'].hex()
818 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
819 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
819 n = self._repo.commit(text, user, date)
820 n = self._repo.commit(text, user, date)
820 if not n:
821 if not n:
821 return self._repo['.'].hex() # different version checked out
822 return self._repo['.'].hex() # different version checked out
822 return node.hex(n)
823 return node.hex(n)
823
824
824 @annotatesubrepoerror
825 @annotatesubrepoerror
825 def phase(self, state):
826 def phase(self, state):
826 return self._repo[state].phase()
827 return self._repo[state].phase()
827
828
828 @annotatesubrepoerror
829 @annotatesubrepoerror
829 def remove(self):
830 def remove(self):
830 # we can't fully delete the repository as it may contain
831 # we can't fully delete the repository as it may contain
831 # local-only history
832 # local-only history
832 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
833 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
833 hg.clean(self._repo, node.nullid, False)
834 hg.clean(self._repo, node.nullid, False)
834
835
835 def _get(self, state):
836 def _get(self, state):
836 source, revision, kind = state
837 source, revision, kind = state
837 if revision in self._repo.unfiltered():
838 if revision in self._repo.unfiltered():
838 return True
839 return True
839 self._repo._subsource = source
840 self._repo._subsource = source
840 srcurl = _abssource(self._repo)
841 srcurl = _abssource(self._repo)
841 other = hg.peer(self._repo, {}, srcurl)
842 other = hg.peer(self._repo, {}, srcurl)
842 if len(self._repo) == 0:
843 if len(self._repo) == 0:
843 self.ui.status(_('cloning subrepo %s from %s\n')
844 self.ui.status(_('cloning subrepo %s from %s\n')
844 % (subrelpath(self), srcurl))
845 % (subrelpath(self), srcurl))
845 parentrepo = self._repo._subparent
846 parentrepo = self._repo._subparent
846 # use self._repo.vfs instead of self.wvfs to remove .hg only
847 # use self._repo.vfs instead of self.wvfs to remove .hg only
847 self._repo.vfs.rmtree()
848 self._repo.vfs.rmtree()
848 other, cloned = hg.clone(self._repo._subparent.baseui, {},
849 other, cloned = hg.clone(self._repo._subparent.baseui, {},
849 other, self._repo.root,
850 other, self._repo.root,
850 update=False)
851 update=False)
851 self._repo = cloned.local()
852 self._repo = cloned.local()
852 self._initrepo(parentrepo, source, create=True)
853 self._initrepo(parentrepo, source, create=True)
853 self._cachestorehash(srcurl)
854 self._cachestorehash(srcurl)
854 else:
855 else:
855 self.ui.status(_('pulling subrepo %s from %s\n')
856 self.ui.status(_('pulling subrepo %s from %s\n')
856 % (subrelpath(self), srcurl))
857 % (subrelpath(self), srcurl))
857 cleansub = self.storeclean(srcurl)
858 cleansub = self.storeclean(srcurl)
858 exchange.pull(self._repo, other)
859 exchange.pull(self._repo, other)
859 if cleansub:
860 if cleansub:
860 # keep the repo clean after pull
861 # keep the repo clean after pull
861 self._cachestorehash(srcurl)
862 self._cachestorehash(srcurl)
862 return False
863 return False
863
864
864 @annotatesubrepoerror
865 @annotatesubrepoerror
865 def get(self, state, overwrite=False):
866 def get(self, state, overwrite=False):
866 inrepo = self._get(state)
867 inrepo = self._get(state)
867 source, revision, kind = state
868 source, revision, kind = state
868 repo = self._repo
869 repo = self._repo
869 repo.ui.debug("getting subrepo %s\n" % self._path)
870 repo.ui.debug("getting subrepo %s\n" % self._path)
870 if inrepo:
871 if inrepo:
871 urepo = repo.unfiltered()
872 urepo = repo.unfiltered()
872 ctx = urepo[revision]
873 ctx = urepo[revision]
873 if ctx.hidden():
874 if ctx.hidden():
874 urepo.ui.warn(
875 urepo.ui.warn(
875 _('revision %s in subrepo %s is hidden\n') \
876 _('revision %s in subrepo %s is hidden\n') \
876 % (revision[0:12], self._path))
877 % (revision[0:12], self._path))
877 repo = urepo
878 repo = urepo
878 hg.updaterepo(repo, revision, overwrite)
879 hg.updaterepo(repo, revision, overwrite)
879
880
880 @annotatesubrepoerror
881 @annotatesubrepoerror
881 def merge(self, state):
882 def merge(self, state):
882 self._get(state)
883 self._get(state)
883 cur = self._repo['.']
884 cur = self._repo['.']
884 dst = self._repo[state[1]]
885 dst = self._repo[state[1]]
885 anc = dst.ancestor(cur)
886 anc = dst.ancestor(cur)
886
887
887 def mergefunc():
888 def mergefunc():
888 if anc == cur and dst.branch() == cur.branch():
889 if anc == cur and dst.branch() == cur.branch():
889 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
890 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
890 hg.update(self._repo, state[1])
891 hg.update(self._repo, state[1])
891 elif anc == dst:
892 elif anc == dst:
892 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
893 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
893 else:
894 else:
894 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
895 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
895 hg.merge(self._repo, state[1], remind=False)
896 hg.merge(self._repo, state[1], remind=False)
896
897
897 wctx = self._repo[None]
898 wctx = self._repo[None]
898 if self.dirty():
899 if self.dirty():
899 if anc != dst:
900 if anc != dst:
900 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
901 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
901 mergefunc()
902 mergefunc()
902 else:
903 else:
903 mergefunc()
904 mergefunc()
904 else:
905 else:
905 mergefunc()
906 mergefunc()
906
907
907 @annotatesubrepoerror
908 @annotatesubrepoerror
908 def push(self, opts):
909 def push(self, opts):
909 force = opts.get('force')
910 force = opts.get('force')
910 newbranch = opts.get('new_branch')
911 newbranch = opts.get('new_branch')
911 ssh = opts.get('ssh')
912 ssh = opts.get('ssh')
912
913
913 # push subrepos depth-first for coherent ordering
914 # push subrepos depth-first for coherent ordering
914 c = self._repo['']
915 c = self._repo['']
915 subs = c.substate # only repos that are committed
916 subs = c.substate # only repos that are committed
916 for s in sorted(subs):
917 for s in sorted(subs):
917 if c.sub(s).push(opts) == 0:
918 if c.sub(s).push(opts) == 0:
918 return False
919 return False
919
920
920 dsturl = _abssource(self._repo, True)
921 dsturl = _abssource(self._repo, True)
921 if not force:
922 if not force:
922 if self.storeclean(dsturl):
923 if self.storeclean(dsturl):
923 self.ui.status(
924 self.ui.status(
924 _('no changes made to subrepo %s since last push to %s\n')
925 _('no changes made to subrepo %s since last push to %s\n')
925 % (subrelpath(self), dsturl))
926 % (subrelpath(self), dsturl))
926 return None
927 return None
927 self.ui.status(_('pushing subrepo %s to %s\n') %
928 self.ui.status(_('pushing subrepo %s to %s\n') %
928 (subrelpath(self), dsturl))
929 (subrelpath(self), dsturl))
929 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
930 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
930 res = exchange.push(self._repo, other, force, newbranch=newbranch)
931 res = exchange.push(self._repo, other, force, newbranch=newbranch)
931
932
932 # the repo is now clean
933 # the repo is now clean
933 self._cachestorehash(dsturl)
934 self._cachestorehash(dsturl)
934 return res.cgresult
935 return res.cgresult
935
936
936 @annotatesubrepoerror
937 @annotatesubrepoerror
937 def outgoing(self, ui, dest, opts):
938 def outgoing(self, ui, dest, opts):
938 if 'rev' in opts or 'branch' in opts:
939 if 'rev' in opts or 'branch' in opts:
939 opts = copy.copy(opts)
940 opts = copy.copy(opts)
940 opts.pop('rev', None)
941 opts.pop('rev', None)
941 opts.pop('branch', None)
942 opts.pop('branch', None)
942 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
943 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
943
944
944 @annotatesubrepoerror
945 @annotatesubrepoerror
945 def incoming(self, ui, source, opts):
946 def incoming(self, ui, source, opts):
946 if 'rev' in opts or 'branch' in opts:
947 if 'rev' in opts or 'branch' in opts:
947 opts = copy.copy(opts)
948 opts = copy.copy(opts)
948 opts.pop('rev', None)
949 opts.pop('rev', None)
949 opts.pop('branch', None)
950 opts.pop('branch', None)
950 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
951 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
951
952
952 @annotatesubrepoerror
953 @annotatesubrepoerror
953 def files(self):
954 def files(self):
954 rev = self._state[1]
955 rev = self._state[1]
955 ctx = self._repo[rev]
956 ctx = self._repo[rev]
956 return ctx.manifest().keys()
957 return ctx.manifest().keys()
957
958
958 def filedata(self, name):
959 def filedata(self, name):
959 rev = self._state[1]
960 rev = self._state[1]
960 return self._repo[rev][name].data()
961 return self._repo[rev][name].data()
961
962
962 def fileflags(self, name):
963 def fileflags(self, name):
963 rev = self._state[1]
964 rev = self._state[1]
964 ctx = self._repo[rev]
965 ctx = self._repo[rev]
965 return ctx.flags(name)
966 return ctx.flags(name)
966
967
967 @annotatesubrepoerror
968 @annotatesubrepoerror
968 def printfiles(self, ui, m, fm, fmt, subrepos):
969 def printfiles(self, ui, m, fm, fmt, subrepos):
969 # If the parent context is a workingctx, use the workingctx here for
970 # If the parent context is a workingctx, use the workingctx here for
970 # consistency.
971 # consistency.
971 if self._ctx.rev() is None:
972 if self._ctx.rev() is None:
972 ctx = self._repo[None]
973 ctx = self._repo[None]
973 else:
974 else:
974 rev = self._state[1]
975 rev = self._state[1]
975 ctx = self._repo[rev]
976 ctx = self._repo[rev]
976 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
977 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
977
978
978 @annotatesubrepoerror
979 @annotatesubrepoerror
979 def getfileset(self, expr):
980 def getfileset(self, expr):
980 if self._ctx.rev() is None:
981 if self._ctx.rev() is None:
981 ctx = self._repo[None]
982 ctx = self._repo[None]
982 else:
983 else:
983 rev = self._state[1]
984 rev = self._state[1]
984 ctx = self._repo[rev]
985 ctx = self._repo[rev]
985
986
986 files = ctx.getfileset(expr)
987 files = ctx.getfileset(expr)
987
988
988 for subpath in ctx.substate:
989 for subpath in ctx.substate:
989 sub = ctx.sub(subpath)
990 sub = ctx.sub(subpath)
990
991
991 try:
992 try:
992 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
993 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
993 except error.LookupError:
994 except error.LookupError:
994 self.ui.status(_("skipping missing subrepository: %s\n")
995 self.ui.status(_("skipping missing subrepository: %s\n")
995 % self.wvfs.reljoin(reporelpath(self), subpath))
996 % self.wvfs.reljoin(reporelpath(self), subpath))
996 return files
997 return files
997
998
998 def walk(self, match):
999 def walk(self, match):
999 ctx = self._repo[None]
1000 ctx = self._repo[None]
1000 return ctx.walk(match)
1001 return ctx.walk(match)
1001
1002
1002 @annotatesubrepoerror
1003 @annotatesubrepoerror
1003 def forget(self, match, prefix):
1004 def forget(self, match, prefix):
1004 return cmdutil.forget(self.ui, self._repo, match,
1005 return cmdutil.forget(self.ui, self._repo, match,
1005 self.wvfs.reljoin(prefix, self._path), True)
1006 self.wvfs.reljoin(prefix, self._path), True)
1006
1007
1007 @annotatesubrepoerror
1008 @annotatesubrepoerror
1008 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1009 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1009 return cmdutil.remove(self.ui, self._repo, matcher,
1010 return cmdutil.remove(self.ui, self._repo, matcher,
1010 self.wvfs.reljoin(prefix, self._path),
1011 self.wvfs.reljoin(prefix, self._path),
1011 after, force, subrepos)
1012 after, force, subrepos)
1012
1013
1013 @annotatesubrepoerror
1014 @annotatesubrepoerror
1014 def revert(self, substate, *pats, **opts):
1015 def revert(self, substate, *pats, **opts):
1015 # reverting a subrepo is a two-step process:
1016 # reverting a subrepo is a two-step process:
1016 # 1. if no_backup is not set, revert all modified
1017 # 1. if no_backup is not set, revert all modified
1017 # files inside the subrepo
1018 # files inside the subrepo
1018 # 2. update the subrepo to the revision specified in
1019 # 2. update the subrepo to the revision specified in
1019 # the corresponding substate dictionary
1020 # the corresponding substate dictionary
1020 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1021 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1021 if not opts.get('no_backup'):
1022 if not opts.get('no_backup'):
1022 # Revert all files on the subrepo, creating backups
1023 # Revert all files on the subrepo, creating backups
1023 # Note that this will not recursively revert subrepos
1024 # Note that this will not recursively revert subrepos
1024 # We could do it if there was a set:subrepos() predicate
1025 # We could do it if there was a set:subrepos() predicate
1025 opts = opts.copy()
1026 opts = opts.copy()
1026 opts['date'] = None
1027 opts['date'] = None
1027 opts['rev'] = substate[1]
1028 opts['rev'] = substate[1]
1028
1029
1029 self.filerevert(*pats, **opts)
1030 self.filerevert(*pats, **opts)
1030
1031
1031 # Update the repo to the revision specified in the given substate
1032 # Update the repo to the revision specified in the given substate
1032 if not opts.get('dry_run'):
1033 if not opts.get('dry_run'):
1033 self.get(substate, overwrite=True)
1034 self.get(substate, overwrite=True)
1034
1035
1035 def filerevert(self, *pats, **opts):
1036 def filerevert(self, *pats, **opts):
1036 ctx = self._repo[opts['rev']]
1037 ctx = self._repo[opts['rev']]
1037 parents = self._repo.dirstate.parents()
1038 parents = self._repo.dirstate.parents()
1038 if opts.get('all'):
1039 if opts.get('all'):
1039 pats = ['set:modified()']
1040 pats = ['set:modified()']
1040 else:
1041 else:
1041 pats = []
1042 pats = []
1042 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1043 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1043
1044
1044 def shortid(self, revid):
1045 def shortid(self, revid):
1045 return revid[:12]
1046 return revid[:12]
1046
1047
1047 def verify(self):
1048 def verify(self):
1048 try:
1049 try:
1049 rev = self._state[1]
1050 rev = self._state[1]
1050 ctx = self._repo.unfiltered()[rev]
1051 ctx = self._repo.unfiltered()[rev]
1051 if ctx.hidden():
1052 if ctx.hidden():
1052 # Since hidden revisions aren't pushed/pulled, it seems worth an
1053 # Since hidden revisions aren't pushed/pulled, it seems worth an
1053 # explicit warning.
1054 # explicit warning.
1054 ui = self._repo.ui
1055 ui = self._repo.ui
1055 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1056 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1056 (self._relpath, node.short(self._ctx.node())))
1057 (self._relpath, node.short(self._ctx.node())))
1057 return 0
1058 return 0
1058 except error.RepoLookupError:
1059 except error.RepoLookupError:
1059 # A missing subrepo revision may be a case of needing to pull it, so
1060 # A missing subrepo revision may be a case of needing to pull it, so
1060 # don't treat this as an error.
1061 # don't treat this as an error.
1061 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1062 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1062 (self._relpath, node.short(self._ctx.node())))
1063 (self._relpath, node.short(self._ctx.node())))
1063 return 0
1064 return 0
1064
1065
1065 @propertycache
1066 @propertycache
1066 def wvfs(self):
1067 def wvfs(self):
1067 """return own wvfs for efficiency and consistency
1068 """return own wvfs for efficiency and consistency
1068 """
1069 """
1069 return self._repo.wvfs
1070 return self._repo.wvfs
1070
1071
1071 @propertycache
1072 @propertycache
1072 def _relpath(self):
1073 def _relpath(self):
1073 """return path to this subrepository as seen from outermost repository
1074 """return path to this subrepository as seen from outermost repository
1074 """
1075 """
1075 # Keep consistent dir separators by avoiding vfs.join(self._path)
1076 # Keep consistent dir separators by avoiding vfs.join(self._path)
1076 return reporelpath(self._repo)
1077 return reporelpath(self._repo)
1077
1078
1078 class svnsubrepo(abstractsubrepo):
1079 class svnsubrepo(abstractsubrepo):
1079 def __init__(self, ctx, path, state, allowcreate):
1080 def __init__(self, ctx, path, state, allowcreate):
1080 super(svnsubrepo, self).__init__(ctx, path)
1081 super(svnsubrepo, self).__init__(ctx, path)
1081 self._state = state
1082 self._state = state
1082 self._exe = util.findexe('svn')
1083 self._exe = util.findexe('svn')
1083 if not self._exe:
1084 if not self._exe:
1084 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1085 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1085 % self._path)
1086 % self._path)
1086
1087
1087 def _svncommand(self, commands, filename='', failok=False):
1088 def _svncommand(self, commands, filename='', failok=False):
1088 cmd = [self._exe]
1089 cmd = [self._exe]
1089 extrakw = {}
1090 extrakw = {}
1090 if not self.ui.interactive():
1091 if not self.ui.interactive():
1091 # Making stdin be a pipe should prevent svn from behaving
1092 # Making stdin be a pipe should prevent svn from behaving
1092 # interactively even if we can't pass --non-interactive.
1093 # interactively even if we can't pass --non-interactive.
1093 extrakw['stdin'] = subprocess.PIPE
1094 extrakw['stdin'] = subprocess.PIPE
1094 # Starting in svn 1.5 --non-interactive is a global flag
1095 # Starting in svn 1.5 --non-interactive is a global flag
1095 # instead of being per-command, but we need to support 1.4 so
1096 # instead of being per-command, but we need to support 1.4 so
1096 # we have to be intelligent about what commands take
1097 # we have to be intelligent about what commands take
1097 # --non-interactive.
1098 # --non-interactive.
1098 if commands[0] in ('update', 'checkout', 'commit'):
1099 if commands[0] in ('update', 'checkout', 'commit'):
1099 cmd.append('--non-interactive')
1100 cmd.append('--non-interactive')
1100 cmd.extend(commands)
1101 cmd.extend(commands)
1101 if filename is not None:
1102 if filename is not None:
1102 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1103 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1103 self._path, filename)
1104 self._path, filename)
1104 cmd.append(path)
1105 cmd.append(path)
1105 env = dict(os.environ)
1106 env = dict(encoding.environ)
1106 # Avoid localized output, preserve current locale for everything else.
1107 # Avoid localized output, preserve current locale for everything else.
1107 lc_all = env.get('LC_ALL')
1108 lc_all = env.get('LC_ALL')
1108 if lc_all:
1109 if lc_all:
1109 env['LANG'] = lc_all
1110 env['LANG'] = lc_all
1110 del env['LC_ALL']
1111 del env['LC_ALL']
1111 env['LC_MESSAGES'] = 'C'
1112 env['LC_MESSAGES'] = 'C'
1112 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1113 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1113 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1114 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1114 universal_newlines=True, env=env, **extrakw)
1115 universal_newlines=True, env=env, **extrakw)
1115 stdout, stderr = p.communicate()
1116 stdout, stderr = p.communicate()
1116 stderr = stderr.strip()
1117 stderr = stderr.strip()
1117 if not failok:
1118 if not failok:
1118 if p.returncode:
1119 if p.returncode:
1119 raise error.Abort(stderr or 'exited with code %d'
1120 raise error.Abort(stderr or 'exited with code %d'
1120 % p.returncode)
1121 % p.returncode)
1121 if stderr:
1122 if stderr:
1122 self.ui.warn(stderr + '\n')
1123 self.ui.warn(stderr + '\n')
1123 return stdout, stderr
1124 return stdout, stderr
1124
1125
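# A minimal sketch of the locale handling above, outside of Mercurial: copy
# the environment, route LC_ALL into LANG so the general locale is preserved,
# and force LC_MESSAGES=C so svn's diagnostics stay parseable. (Assumes plain
# os.environ; the real code uses Mercurial's encoding.environ wrapper.)
import os

def svn_env():
    env = dict(os.environ)
    lc_all = env.get('LC_ALL')
    if lc_all:
        env['LANG'] = lc_all
        del env['LC_ALL']
    env['LC_MESSAGES'] = 'C'
    return env

print(svn_env().get('LC_MESSAGES'))  # 'C'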
1125 @propertycache
1126 @propertycache
1126 def _svnversion(self):
1127 def _svnversion(self):
1127 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1128 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1128 m = re.search(r'^(\d+)\.(\d+)', output)
1129 m = re.search(r'^(\d+)\.(\d+)', output)
1129 if not m:
1130 if not m:
1130 raise error.Abort(_('cannot retrieve svn tool version'))
1131 raise error.Abort(_('cannot retrieve svn tool version'))
1131 return (int(m.group(1)), int(m.group(2)))
1132 return (int(m.group(1)), int(m.group(2)))
1132
1133
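# Hedged example of the version parsing used by _svnversion: only the leading
# "major.minor" of `svn --version --quiet` output is needed. The sample string
# is illustrative, not captured output.
import re

def parse_svn_version(output):
    m = re.search(r'^(\d+)\.(\d+)', output)
    if not m:
        raise ValueError('cannot retrieve svn tool version')
    return (int(m.group(1)), int(m.group(2)))

print(parse_svn_version('1.9.7\n'))  # (1, 9)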
1133 def _wcrevs(self):
1134 def _wcrevs(self):
1134 # Get the working directory revision as well as the last
1135 # Get the working directory revision as well as the last
1135 # commit revision so we can compare the subrepo state with
1136 # commit revision so we can compare the subrepo state with
1136 # both. We used to store the working directory one.
1137 # both. We used to store the working directory one.
1137 output, err = self._svncommand(['info', '--xml'])
1138 output, err = self._svncommand(['info', '--xml'])
1138 doc = xml.dom.minidom.parseString(output)
1139 doc = xml.dom.minidom.parseString(output)
1139 entries = doc.getElementsByTagName('entry')
1140 entries = doc.getElementsByTagName('entry')
1140 lastrev, rev = '0', '0'
1141 lastrev, rev = '0', '0'
1141 if entries:
1142 if entries:
1142 rev = str(entries[0].getAttribute('revision')) or '0'
1143 rev = str(entries[0].getAttribute('revision')) or '0'
1143 commits = entries[0].getElementsByTagName('commit')
1144 commits = entries[0].getElementsByTagName('commit')
1144 if commits:
1145 if commits:
1145 lastrev = str(commits[0].getAttribute('revision')) or '0'
1146 lastrev = str(commits[0].getAttribute('revision')) or '0'
1146 return (lastrev, rev)
1147 return (lastrev, rev)
1147
1148
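# Rough illustration of how _wcrevs pulls the working-copy and last-commit
# revisions out of `svn info --xml`. The XML below is a trimmed, hypothetical
# sample of svn's output, kept only to show which attributes are read.
import xml.dom.minidom

sample = '''<?xml version="1.0"?>
<info>
  <entry kind="dir" path="." revision="42">
    <commit revision="40"/>
  </entry>
</info>'''

doc = xml.dom.minidom.parseString(sample)
entries = doc.getElementsByTagName('entry')
lastrev, rev = '0', '0'
if entries:
    rev = str(entries[0].getAttribute('revision')) or '0'
    commits = entries[0].getElementsByTagName('commit')
    if commits:
        lastrev = str(commits[0].getAttribute('revision')) or '0'
print((lastrev, rev))  # ('40', '42')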
1148 def _wcrev(self):
1149 def _wcrev(self):
1149 return self._wcrevs()[0]
1150 return self._wcrevs()[0]
1150
1151
1151 def _wcchanged(self):
1152 def _wcchanged(self):
1152 """Return (changes, extchanges, missing) where changes is True
1153 """Return (changes, extchanges, missing) where changes is True
1153 if the working directory was changed, extchanges is
1154 if the working directory was changed, extchanges is
1154 True if any of these changes concern an external entry and missing
1155 True if any of these changes concern an external entry and missing
1155 is True if any change is a missing entry.
1156 is True if any change is a missing entry.
1156 """
1157 """
1157 output, err = self._svncommand(['status', '--xml'])
1158 output, err = self._svncommand(['status', '--xml'])
1158 externals, changes, missing = [], [], []
1159 externals, changes, missing = [], [], []
1159 doc = xml.dom.minidom.parseString(output)
1160 doc = xml.dom.minidom.parseString(output)
1160 for e in doc.getElementsByTagName('entry'):
1161 for e in doc.getElementsByTagName('entry'):
1161 s = e.getElementsByTagName('wc-status')
1162 s = e.getElementsByTagName('wc-status')
1162 if not s:
1163 if not s:
1163 continue
1164 continue
1164 item = s[0].getAttribute('item')
1165 item = s[0].getAttribute('item')
1165 props = s[0].getAttribute('props')
1166 props = s[0].getAttribute('props')
1166 path = e.getAttribute('path')
1167 path = e.getAttribute('path')
1167 if item == 'external':
1168 if item == 'external':
1168 externals.append(path)
1169 externals.append(path)
1169 elif item == 'missing':
1170 elif item == 'missing':
1170 missing.append(path)
1171 missing.append(path)
1171 if (item not in ('', 'normal', 'unversioned', 'external')
1172 if (item not in ('', 'normal', 'unversioned', 'external')
1172 or props not in ('', 'none', 'normal')):
1173 or props not in ('', 'none', 'normal')):
1173 changes.append(path)
1174 changes.append(path)
1174 for path in changes:
1175 for path in changes:
1175 for ext in externals:
1176 for ext in externals:
1176 if path == ext or path.startswith(ext + pycompat.ossep):
1177 if path == ext or path.startswith(ext + pycompat.ossep):
1177 return True, True, bool(missing)
1178 return True, True, bool(missing)
1178 return bool(changes), False, bool(missing)
1179 return bool(changes), False, bool(missing)
1179
1180
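# Sketch of the `svn status --xml` classification above: entries whose
# wc-status item is 'external' or 'missing' are tracked separately, and any
# item/props value outside the "clean" set counts as a change. The XML sample
# is hypothetical and heavily trimmed.
import xml.dom.minidom

sample = '''<?xml version="1.0"?>
<status><target path=".">
  <entry path="a.txt"><wc-status item="modified" props="none"/></entry>
  <entry path="ext"><wc-status item="external" props="none"/></entry>
</target></status>'''

externals, changes, missing = [], [], []
doc = xml.dom.minidom.parseString(sample)
for e in doc.getElementsByTagName('entry'):
    s = e.getElementsByTagName('wc-status')
    if not s:
        continue
    item = s[0].getAttribute('item')
    props = s[0].getAttribute('props')
    path = e.getAttribute('path')
    if item == 'external':
        externals.append(path)
    elif item == 'missing':
        missing.append(path)
    if (item not in ('', 'normal', 'unversioned', 'external')
            or props not in ('', 'none', 'normal')):
        changes.append(path)
print(changes, externals, missing)  # ['a.txt'] ['ext'] []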
1180 def dirty(self, ignoreupdate=False):
1181 def dirty(self, ignoreupdate=False):
1181 if not self._wcchanged()[0]:
1182 if not self._wcchanged()[0]:
1182 if self._state[1] in self._wcrevs() or ignoreupdate:
1183 if self._state[1] in self._wcrevs() or ignoreupdate:
1183 return False
1184 return False
1184 return True
1185 return True
1185
1186
1186 def basestate(self):
1187 def basestate(self):
1187 lastrev, rev = self._wcrevs()
1188 lastrev, rev = self._wcrevs()
1188 if lastrev != rev:
1189 if lastrev != rev:
1189 # The last committed rev is not the same as rev. We would
1190 # The last committed rev is not the same as rev. We would
1190 # like to take lastrev but we do not know if the subrepo
1191 # like to take lastrev but we do not know if the subrepo
1191 # URL exists at lastrev. Test it and fall back to rev if it
1192 # URL exists at lastrev. Test it and fall back to rev if it
1192 # is not there.
1193 # is not there.
1193 try:
1194 try:
1194 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1195 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1195 return lastrev
1196 return lastrev
1196 except error.Abort:
1197 except error.Abort:
1197 pass
1198 pass
1198 return rev
1199 return rev
1199
1200
1200 @annotatesubrepoerror
1201 @annotatesubrepoerror
1201 def commit(self, text, user, date):
1202 def commit(self, text, user, date):
1202 # user and date are out of our hands since svn is centralized
1203 # user and date are out of our hands since svn is centralized
1203 changed, extchanged, missing = self._wcchanged()
1204 changed, extchanged, missing = self._wcchanged()
1204 if not changed:
1205 if not changed:
1205 return self.basestate()
1206 return self.basestate()
1206 if extchanged:
1207 if extchanged:
1207 # Do not try to commit externals
1208 # Do not try to commit externals
1208 raise error.Abort(_('cannot commit svn externals'))
1209 raise error.Abort(_('cannot commit svn externals'))
1209 if missing:
1210 if missing:
1210 # svn can commit with missing entries but aborting like hg
1211 # svn can commit with missing entries but aborting like hg
1211 # seems a better approach.
1212 # seems a better approach.
1212 raise error.Abort(_('cannot commit missing svn entries'))
1213 raise error.Abort(_('cannot commit missing svn entries'))
1213 commitinfo, err = self._svncommand(['commit', '-m', text])
1214 commitinfo, err = self._svncommand(['commit', '-m', text])
1214 self.ui.status(commitinfo)
1215 self.ui.status(commitinfo)
1215 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1216 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1216 if not newrev:
1217 if not newrev:
1217 if not commitinfo.strip():
1218 if not commitinfo.strip():
1218 # Sometimes, our definition of "changed" differs from
1219 # Sometimes, our definition of "changed" differs from
1219 # svn's. For instance, svn ignores missing files
1220 # svn's. For instance, svn ignores missing files
1220 # when committing. If there are only missing files, no
1221 # when committing. If there are only missing files, no
1221 # commit is made, there is no output and no error code.
1222 # commit is made, there is no output and no error code.
1222 raise error.Abort(_('failed to commit svn changes'))
1223 raise error.Abort(_('failed to commit svn changes'))
1223 raise error.Abort(commitinfo.splitlines()[-1])
1224 raise error.Abort(commitinfo.splitlines()[-1])
1224 newrev = newrev.groups()[0]
1225 newrev = newrev.groups()[0]
1225 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1226 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1226 return newrev
1227 return newrev
1227
1228
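# Tiny example of the "Committed revision N." scraping done in commit().
# The sample output string is only illustrative of svn's commit chatter.
import re

commitinfo = 'Sending        a.txt\nTransmitting file data .\nCommitted revision 42.\n'
m = re.search('Committed revision ([0-9]+).', commitinfo)
print(m.groups()[0] if m else None)  # '42'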
1228 @annotatesubrepoerror
1229 @annotatesubrepoerror
1229 def remove(self):
1230 def remove(self):
1230 if self.dirty():
1231 if self.dirty():
1231 self.ui.warn(_('not removing repo %s because '
1232 self.ui.warn(_('not removing repo %s because '
1232 'it has changes.\n') % self._path)
1233 'it has changes.\n') % self._path)
1233 return
1234 return
1234 self.ui.note(_('removing subrepo %s\n') % self._path)
1235 self.ui.note(_('removing subrepo %s\n') % self._path)
1235
1236
1236 self.wvfs.rmtree(forcibly=True)
1237 self.wvfs.rmtree(forcibly=True)
1237 try:
1238 try:
1238 pwvfs = self._ctx.repo().wvfs
1239 pwvfs = self._ctx.repo().wvfs
1239 pwvfs.removedirs(pwvfs.dirname(self._path))
1240 pwvfs.removedirs(pwvfs.dirname(self._path))
1240 except OSError:
1241 except OSError:
1241 pass
1242 pass
1242
1243
1243 @annotatesubrepoerror
1244 @annotatesubrepoerror
1244 def get(self, state, overwrite=False):
1245 def get(self, state, overwrite=False):
1245 if overwrite:
1246 if overwrite:
1246 self._svncommand(['revert', '--recursive'])
1247 self._svncommand(['revert', '--recursive'])
1247 args = ['checkout']
1248 args = ['checkout']
1248 if self._svnversion >= (1, 5):
1249 if self._svnversion >= (1, 5):
1249 args.append('--force')
1250 args.append('--force')
1250 # The revision must be specified at the end of the URL to properly
1251 # The revision must be specified at the end of the URL to properly
1251 # update to a directory which has since been deleted and recreated.
1252 # update to a directory which has since been deleted and recreated.
1252 args.append('%s@%s' % (state[0], state[1]))
1253 args.append('%s@%s' % (state[0], state[1]))
1253 status, err = self._svncommand(args, failok=True)
1254 status, err = self._svncommand(args, failok=True)
1254 _sanitize(self.ui, self.wvfs, '.svn')
1255 _sanitize(self.ui, self.wvfs, '.svn')
1255 if not re.search('Checked out revision [0-9]+.', status):
1256 if not re.search('Checked out revision [0-9]+.', status):
1256 if ('is already a working copy for a different URL' in err
1257 if ('is already a working copy for a different URL' in err
1257 and (self._wcchanged()[:2] == (False, False))):
1258 and (self._wcchanged()[:2] == (False, False))):
1258 # obstructed but clean working copy, so just blow it away.
1259 # obstructed but clean working copy, so just blow it away.
1259 self.remove()
1260 self.remove()
1260 self.get(state, overwrite=False)
1261 self.get(state, overwrite=False)
1261 return
1262 return
1262 raise error.Abort((status or err).splitlines()[-1])
1263 raise error.Abort((status or err).splitlines()[-1])
1263 self.ui.status(status)
1264 self.ui.status(status)
1264
1265
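# Sketch of the checkout argument handling in get(): '--force' is only valid
# from svn 1.5 on, and pinning the revision with the URL@REV peg syntax
# survives directories that were deleted and recreated. The URL is a made-up
# placeholder.
def checkout_args(url, rev, svnversion):
    args = ['checkout']
    if svnversion >= (1, 5):
        args.append('--force')
    args.append('%s@%s' % (url, rev))
    return args

print(checkout_args('http://example.com/repo/trunk', '42', (1, 9)))
# ['checkout', '--force', 'http://example.com/repo/trunk@42']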
1265 @annotatesubrepoerror
1266 @annotatesubrepoerror
1266 def merge(self, state):
1267 def merge(self, state):
1267 old = self._state[1]
1268 old = self._state[1]
1268 new = state[1]
1269 new = state[1]
1269 wcrev = self._wcrev()
1270 wcrev = self._wcrev()
1270 if new != wcrev:
1271 if new != wcrev:
1271 dirty = old == wcrev or self._wcchanged()[0]
1272 dirty = old == wcrev or self._wcchanged()[0]
1272 if _updateprompt(self.ui, self, dirty, wcrev, new):
1273 if _updateprompt(self.ui, self, dirty, wcrev, new):
1273 self.get(state, False)
1274 self.get(state, False)
1274
1275
1275 def push(self, opts):
1276 def push(self, opts):
1276 # push is a no-op for SVN
1277 # push is a no-op for SVN
1277 return True
1278 return True
1278
1279
1279 @annotatesubrepoerror
1280 @annotatesubrepoerror
1280 def files(self):
1281 def files(self):
1281 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1282 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1282 doc = xml.dom.minidom.parseString(output)
1283 doc = xml.dom.minidom.parseString(output)
1283 paths = []
1284 paths = []
1284 for e in doc.getElementsByTagName('entry'):
1285 for e in doc.getElementsByTagName('entry'):
1285 kind = str(e.getAttribute('kind'))
1286 kind = str(e.getAttribute('kind'))
1286 if kind != 'file':
1287 if kind != 'file':
1287 continue
1288 continue
1288 name = ''.join(c.data for c
1289 name = ''.join(c.data for c
1289 in e.getElementsByTagName('name')[0].childNodes
1290 in e.getElementsByTagName('name')[0].childNodes
1290 if c.nodeType == c.TEXT_NODE)
1291 if c.nodeType == c.TEXT_NODE)
1291 paths.append(name.encode('utf-8'))
1292 paths.append(name.encode('utf-8'))
1292 return paths
1293 return paths
1293
1294
1294 def filedata(self, name):
1295 def filedata(self, name):
1295 return self._svncommand(['cat'], name)[0]
1296 return self._svncommand(['cat'], name)[0]
1296
1297
1297
1298
1298 class gitsubrepo(abstractsubrepo):
1299 class gitsubrepo(abstractsubrepo):
1299 def __init__(self, ctx, path, state, allowcreate):
1300 def __init__(self, ctx, path, state, allowcreate):
1300 super(gitsubrepo, self).__init__(ctx, path)
1301 super(gitsubrepo, self).__init__(ctx, path)
1301 self._state = state
1302 self._state = state
1302 self._abspath = ctx.repo().wjoin(path)
1303 self._abspath = ctx.repo().wjoin(path)
1303 self._subparent = ctx.repo()
1304 self._subparent = ctx.repo()
1304 self._ensuregit()
1305 self._ensuregit()
1305
1306
1306 def _ensuregit(self):
1307 def _ensuregit(self):
1307 try:
1308 try:
1308 self._gitexecutable = 'git'
1309 self._gitexecutable = 'git'
1309 out, err = self._gitnodir(['--version'])
1310 out, err = self._gitnodir(['--version'])
1310 except OSError as e:
1311 except OSError as e:
1311 genericerror = _("error executing git for subrepo '%s': %s")
1312 genericerror = _("error executing git for subrepo '%s': %s")
1312 notfoundhint = _("check git is installed and in your PATH")
1313 notfoundhint = _("check git is installed and in your PATH")
1313 if e.errno != errno.ENOENT:
1314 if e.errno != errno.ENOENT:
1314 raise error.Abort(genericerror % (self._path, e.strerror))
1315 raise error.Abort(genericerror % (self._path, e.strerror))
1315 elif os.name == 'nt':
1316 elif os.name == 'nt':
1316 try:
1317 try:
1317 self._gitexecutable = 'git.cmd'
1318 self._gitexecutable = 'git.cmd'
1318 out, err = self._gitnodir(['--version'])
1319 out, err = self._gitnodir(['--version'])
1319 except OSError as e2:
1320 except OSError as e2:
1320 if e2.errno == errno.ENOENT:
1321 if e2.errno == errno.ENOENT:
1321 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1322 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1322 " for subrepo '%s'") % self._path,
1323 " for subrepo '%s'") % self._path,
1323 hint=notfoundhint)
1324 hint=notfoundhint)
1324 else:
1325 else:
1325 raise error.Abort(genericerror % (self._path,
1326 raise error.Abort(genericerror % (self._path,
1326 e2.strerror))
1327 e2.strerror))
1327 else:
1328 else:
1328 raise error.Abort(_("couldn't find git for subrepo '%s'")
1329 raise error.Abort(_("couldn't find git for subrepo '%s'")
1329 % self._path, hint=notfoundhint)
1330 % self._path, hint=notfoundhint)
1330 versionstatus = self._checkversion(out)
1331 versionstatus = self._checkversion(out)
1331 if versionstatus == 'unknown':
1332 if versionstatus == 'unknown':
1332 self.ui.warn(_('cannot retrieve git version\n'))
1333 self.ui.warn(_('cannot retrieve git version\n'))
1333 elif versionstatus == 'abort':
1334 elif versionstatus == 'abort':
1334 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1335 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1335 elif versionstatus == 'warning':
1336 elif versionstatus == 'warning':
1336 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1337 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1337
1338
1338 @staticmethod
1339 @staticmethod
1339 def _gitversion(out):
1340 def _gitversion(out):
1340 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1341 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1341 if m:
1342 if m:
1342 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1343 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1343
1344
1344 m = re.search(r'^git version (\d+)\.(\d+)', out)
1345 m = re.search(r'^git version (\d+)\.(\d+)', out)
1345 if m:
1346 if m:
1346 return (int(m.group(1)), int(m.group(2)), 0)
1347 return (int(m.group(1)), int(m.group(2)), 0)
1347
1348
1348 return -1
1349 return -1
1349
1350
1350 @staticmethod
1351 @staticmethod
1351 def _checkversion(out):
1352 def _checkversion(out):
1352 '''ensure git version is new enough
1353 '''ensure git version is new enough
1353
1354
1354 >>> _checkversion = gitsubrepo._checkversion
1355 >>> _checkversion = gitsubrepo._checkversion
1355 >>> _checkversion('git version 1.6.0')
1356 >>> _checkversion('git version 1.6.0')
1356 'ok'
1357 'ok'
1357 >>> _checkversion('git version 1.8.5')
1358 >>> _checkversion('git version 1.8.5')
1358 'ok'
1359 'ok'
1359 >>> _checkversion('git version 1.4.0')
1360 >>> _checkversion('git version 1.4.0')
1360 'abort'
1361 'abort'
1361 >>> _checkversion('git version 1.5.0')
1362 >>> _checkversion('git version 1.5.0')
1362 'warning'
1363 'warning'
1363 >>> _checkversion('git version 1.9-rc0')
1364 >>> _checkversion('git version 1.9-rc0')
1364 'ok'
1365 'ok'
1365 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1366 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1366 'ok'
1367 'ok'
1367 >>> _checkversion('git version 1.9.0.GIT')
1368 >>> _checkversion('git version 1.9.0.GIT')
1368 'ok'
1369 'ok'
1369 >>> _checkversion('git version 12345')
1370 >>> _checkversion('git version 12345')
1370 'unknown'
1371 'unknown'
1371 >>> _checkversion('no')
1372 >>> _checkversion('no')
1372 'unknown'
1373 'unknown'
1373 '''
1374 '''
1374 version = gitsubrepo._gitversion(out)
1375 version = gitsubrepo._gitversion(out)
1375 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1376 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1376 # despite the docstring comment. For now, error on 1.4.0, warn on
1377 # despite the docstring comment. For now, error on 1.4.0, warn on
1377 # 1.5.0 but attempt to continue.
1378 # 1.5.0 but attempt to continue.
1378 if version == -1:
1379 if version == -1:
1379 return 'unknown'
1380 return 'unknown'
1380 if version < (1, 5, 0):
1381 if version < (1, 5, 0):
1381 return 'abort'
1382 return 'abort'
1382 elif version < (1, 6, 0):
1383 elif version < (1, 6, 0):
1383 return 'warning'
1384 return 'warning'
1384 return 'ok'
1385 return 'ok'
1385
1386
1386 def _gitcommand(self, commands, env=None, stream=False):
1387 def _gitcommand(self, commands, env=None, stream=False):
1387 return self._gitdir(commands, env=env, stream=stream)[0]
1388 return self._gitdir(commands, env=env, stream=stream)[0]
1388
1389
1389 def _gitdir(self, commands, env=None, stream=False):
1390 def _gitdir(self, commands, env=None, stream=False):
1390 return self._gitnodir(commands, env=env, stream=stream,
1391 return self._gitnodir(commands, env=env, stream=stream,
1391 cwd=self._abspath)
1392 cwd=self._abspath)
1392
1393
1393 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1394 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1394 """Calls the git command
1395 """Calls the git command
1395
1396
1396 This method tries to call the git command. Versions prior to 1.6.0
1397 This method tries to call the git command. Versions prior to 1.6.0
1397 are not supported and will very probably fail.
1398 are not supported and will very probably fail.
1398 """
1399 """
1399 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1400 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1400 if env is None:
1401 if env is None:
1401 env = os.environ.copy()
1402 env = encoding.environ.copy()
1402 # disable localization for Git output (issue5176)
1403 # disable localization for Git output (issue5176)
1403 env['LC_ALL'] = 'C'
1404 env['LC_ALL'] = 'C'
1404 # fix for Git CVE-2015-7545
1405 # fix for Git CVE-2015-7545
1405 if 'GIT_ALLOW_PROTOCOL' not in env:
1406 if 'GIT_ALLOW_PROTOCOL' not in env:
1406 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1407 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1407 # unless ui.quiet is set, print git's stderr,
1408 # unless ui.quiet is set, print git's stderr,
1408 # which is mostly progress and useful info
1409 # which is mostly progress and useful info
1409 errpipe = None
1410 errpipe = None
1410 if self.ui.quiet:
1411 if self.ui.quiet:
1411 errpipe = open(os.devnull, 'w')
1412 errpipe = open(os.devnull, 'w')
1412 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1413 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1413 cwd=cwd, env=env, close_fds=util.closefds,
1414 cwd=cwd, env=env, close_fds=util.closefds,
1414 stdout=subprocess.PIPE, stderr=errpipe)
1415 stdout=subprocess.PIPE, stderr=errpipe)
1415 if stream:
1416 if stream:
1416 return p.stdout, None
1417 return p.stdout, None
1417
1418
1418 retdata = p.stdout.read().strip()
1419 retdata = p.stdout.read().strip()
1419 # wait for the child to exit to avoid a race condition.
1420 # wait for the child to exit to avoid a race condition.
1420 p.wait()
1421 p.wait()
1421
1422
1422 if p.returncode != 0 and p.returncode != 1:
1423 if p.returncode != 0 and p.returncode != 1:
1423 # there are certain error codes that are ok
1424 # there are certain error codes that are ok
1424 command = commands[0]
1425 command = commands[0]
1425 if command in ('cat-file', 'symbolic-ref'):
1426 if command in ('cat-file', 'symbolic-ref'):
1426 return retdata, p.returncode
1427 return retdata, p.returncode
1427 # for all others, abort
1428 # for all others, abort
1428 raise error.Abort(_('git %s error %d in %s') %
1429 raise error.Abort(_('git %s error %d in %s') %
1429 (command, p.returncode, self._relpath))
1430 (command, p.returncode, self._relpath))
1430
1431
1431 return retdata, p.returncode
1432 return retdata, p.returncode
1432
1433
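# A minimal sketch of the child-process environment set up by _gitnodir:
# LC_ALL=C keeps git's output unlocalized, and GIT_ALLOW_PROTOCOL restricts
# the transport protocols git may use (the CVE-2015-7545 mitigation). This
# uses plain os.environ and subprocess and assumes a `git` binary on PATH.
import os
import subprocess

env = dict(os.environ)
env['LC_ALL'] = 'C'
env.setdefault('GIT_ALLOW_PROTOCOL', 'file:git:http:https:ssh')
out = subprocess.check_output(['git', '--version'], env=env)
print(out.decode().strip())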
1433 def _gitmissing(self):
1434 def _gitmissing(self):
1434 return not self.wvfs.exists('.git')
1435 return not self.wvfs.exists('.git')
1435
1436
1436 def _gitstate(self):
1437 def _gitstate(self):
1437 return self._gitcommand(['rev-parse', 'HEAD'])
1438 return self._gitcommand(['rev-parse', 'HEAD'])
1438
1439
1439 def _gitcurrentbranch(self):
1440 def _gitcurrentbranch(self):
1440 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1441 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1441 if err:
1442 if err:
1442 current = None
1443 current = None
1443 return current
1444 return current
1444
1445
1445 def _gitremote(self, remote):
1446 def _gitremote(self, remote):
1446 out = self._gitcommand(['remote', 'show', '-n', remote])
1447 out = self._gitcommand(['remote', 'show', '-n', remote])
1447 line = out.split('\n')[1]
1448 line = out.split('\n')[1]
1448 i = line.index('URL: ') + len('URL: ')
1449 i = line.index('URL: ') + len('URL: ')
1449 return line[i:]
1450 return line[i:]
1450
1451
1451 def _githavelocally(self, revision):
1452 def _githavelocally(self, revision):
1452 out, code = self._gitdir(['cat-file', '-e', revision])
1453 out, code = self._gitdir(['cat-file', '-e', revision])
1453 return code == 0
1454 return code == 0
1454
1455
1455 def _gitisancestor(self, r1, r2):
1456 def _gitisancestor(self, r1, r2):
1456 base = self._gitcommand(['merge-base', r1, r2])
1457 base = self._gitcommand(['merge-base', r1, r2])
1457 return base == r1
1458 return base == r1
1458
1459
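# Hedged sketch of the ancestry test in _gitisancestor: r1 is an ancestor of
# r2 exactly when `git merge-base r1 r2` prints r1 itself. repo_dir, r1 and
# r2 are placeholders; this needs a real git checkout to run.
import subprocess

def is_ancestor(repo_dir, r1, r2):
    base = subprocess.check_output(
        ['git', '-C', repo_dir, 'merge-base', r1, r2]).strip().decode('ascii')
    return base == r1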
1459 def _gitisbare(self):
1460 def _gitisbare(self):
1460 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1461 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1461
1462
1462 def _gitupdatestat(self):
1463 def _gitupdatestat(self):
1463 """This must be run before git diff-index.
1464 """This must be run before git diff-index.
1464 diff-index only looks at changes to file stat;
1465 diff-index only looks at changes to file stat;
1465 this command looks at file contents and updates the stat."""
1466 this command looks at file contents and updates the stat."""
1466 self._gitcommand(['update-index', '-q', '--refresh'])
1467 self._gitcommand(['update-index', '-q', '--refresh'])
1467
1468
1468 def _gitbranchmap(self):
1469 def _gitbranchmap(self):
1469 '''returns 2 things:
1470 '''returns 2 things:
1470 a map from git branch to revision
1471 a map from git branch to revision
1471 a map from revision to branches'''
1472 a map from revision to branches'''
1472 branch2rev = {}
1473 branch2rev = {}
1473 rev2branch = {}
1474 rev2branch = {}
1474
1475
1475 out = self._gitcommand(['for-each-ref', '--format',
1476 out = self._gitcommand(['for-each-ref', '--format',
1476 '%(objectname) %(refname)'])
1477 '%(objectname) %(refname)'])
1477 for line in out.split('\n'):
1478 for line in out.split('\n'):
1478 revision, ref = line.split(' ')
1479 revision, ref = line.split(' ')
1479 if (not ref.startswith('refs/heads/') and
1480 if (not ref.startswith('refs/heads/') and
1480 not ref.startswith('refs/remotes/')):
1481 not ref.startswith('refs/remotes/')):
1481 continue
1482 continue
1482 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1483 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1483 continue # ignore remote/HEAD redirects
1484 continue # ignore remote/HEAD redirects
1484 branch2rev[ref] = revision
1485 branch2rev[ref] = revision
1485 rev2branch.setdefault(revision, []).append(ref)
1486 rev2branch.setdefault(revision, []).append(ref)
1486 return branch2rev, rev2branch
1487 return branch2rev, rev2branch
1487
1488
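# Illustration of the branch-map construction above: each line of
# `git for-each-ref --format '%(objectname) %(refname)'` is "sha ref", and
# remote HEAD symrefs are skipped. The sample output below is fabricated.
sample = '\n'.join([
    'aaaa refs/heads/master',
    'aaaa refs/remotes/origin/master',
    'bbbb refs/remotes/origin/HEAD',
    'cccc refs/tags/v1.0',
])

branch2rev, rev2branch = {}, {}
for line in sample.split('\n'):
    revision, ref = line.split(' ')
    if (not ref.startswith('refs/heads/') and
            not ref.startswith('refs/remotes/')):
        continue
    if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
        continue
    branch2rev[ref] = revision
    rev2branch.setdefault(revision, []).append(ref)
print(rev2branch)  # {'aaaa': ['refs/heads/master', 'refs/remotes/origin/master']}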
1488 def _gittracking(self, branches):
1489 def _gittracking(self, branches):
1489 'return map of remote branch to local tracking branch'
1490 'return map of remote branch to local tracking branch'
1490 # assumes no more than one local tracking branch for each remote
1491 # assumes no more than one local tracking branch for each remote
1491 tracking = {}
1492 tracking = {}
1492 for b in branches:
1493 for b in branches:
1493 if b.startswith('refs/remotes/'):
1494 if b.startswith('refs/remotes/'):
1494 continue
1495 continue
1495 bname = b.split('/', 2)[2]
1496 bname = b.split('/', 2)[2]
1496 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1497 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1497 if remote:
1498 if remote:
1498 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1499 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1499 tracking['refs/remotes/%s/%s' %
1500 tracking['refs/remotes/%s/%s' %
1500 (remote, ref.split('/', 2)[2])] = b
1501 (remote, ref.split('/', 2)[2])] = b
1501 return tracking
1502 return tracking
1502
1503
1503 def _abssource(self, source):
1504 def _abssource(self, source):
1504 if '://' not in source:
1505 if '://' not in source:
1505 # recognize the scp syntax as an absolute source
1506 # recognize the scp syntax as an absolute source
1506 colon = source.find(':')
1507 colon = source.find(':')
1507 if colon != -1 and '/' not in source[:colon]:
1508 if colon != -1 and '/' not in source[:colon]:
1508 return source
1509 return source
1509 self._subsource = source
1510 self._subsource = source
1510 return _abssource(self)
1511 return _abssource(self)
1511
1512
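# Small example of the "scp syntax" check in _abssource: a source without
# '://' but with a colon and no slash before it (user@host:path) is treated
# as already absolute and returned untouched. The sample sources are
# placeholders.
def is_scp_like(source):
    if '://' in source:
        return False
    colon = source.find(':')
    return colon != -1 and '/' not in source[:colon]

print(is_scp_like('git@example.com:proj.git'))  # True
print(is_scp_like('../sibling/repo'))           # False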
1512 def _fetch(self, source, revision):
1513 def _fetch(self, source, revision):
1513 if self._gitmissing():
1514 if self._gitmissing():
1514 source = self._abssource(source)
1515 source = self._abssource(source)
1515 self.ui.status(_('cloning subrepo %s from %s\n') %
1516 self.ui.status(_('cloning subrepo %s from %s\n') %
1516 (self._relpath, source))
1517 (self._relpath, source))
1517 self._gitnodir(['clone', source, self._abspath])
1518 self._gitnodir(['clone', source, self._abspath])
1518 if self._githavelocally(revision):
1519 if self._githavelocally(revision):
1519 return
1520 return
1520 self.ui.status(_('pulling subrepo %s from %s\n') %
1521 self.ui.status(_('pulling subrepo %s from %s\n') %
1521 (self._relpath, self._gitremote('origin')))
1522 (self._relpath, self._gitremote('origin')))
1522 # try only origin: the originally cloned repo
1523 # try only origin: the originally cloned repo
1523 self._gitcommand(['fetch'])
1524 self._gitcommand(['fetch'])
1524 if not self._githavelocally(revision):
1525 if not self._githavelocally(revision):
1525 raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
1526 raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
1526 (revision, self._relpath))
1527 (revision, self._relpath))
1527
1528
1528 @annotatesubrepoerror
1529 @annotatesubrepoerror
1529 def dirty(self, ignoreupdate=False):
1530 def dirty(self, ignoreupdate=False):
1530 if self._gitmissing():
1531 if self._gitmissing():
1531 return self._state[1] != ''
1532 return self._state[1] != ''
1532 if self._gitisbare():
1533 if self._gitisbare():
1533 return True
1534 return True
1534 if not ignoreupdate and self._state[1] != self._gitstate():
1535 if not ignoreupdate and self._state[1] != self._gitstate():
1535 # different version checked out
1536 # different version checked out
1536 return True
1537 return True
1537 # check for staged changes or modified files; ignore untracked files
1538 # check for staged changes or modified files; ignore untracked files
1538 self._gitupdatestat()
1539 self._gitupdatestat()
1539 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1540 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1540 return code == 1
1541 return code == 1
1541
1542
1542 def basestate(self):
1543 def basestate(self):
1543 return self._gitstate()
1544 return self._gitstate()
1544
1545
1545 @annotatesubrepoerror
1546 @annotatesubrepoerror
1546 def get(self, state, overwrite=False):
1547 def get(self, state, overwrite=False):
1547 source, revision, kind = state
1548 source, revision, kind = state
1548 if not revision:
1549 if not revision:
1549 self.remove()
1550 self.remove()
1550 return
1551 return
1551 self._fetch(source, revision)
1552 self._fetch(source, revision)
1552 # if the repo was set to be bare, unbare it
1553 # if the repo was set to be bare, unbare it
1553 if self._gitisbare():
1554 if self._gitisbare():
1554 self._gitcommand(['config', 'core.bare', 'false'])
1555 self._gitcommand(['config', 'core.bare', 'false'])
1555 if self._gitstate() == revision:
1556 if self._gitstate() == revision:
1556 self._gitcommand(['reset', '--hard', 'HEAD'])
1557 self._gitcommand(['reset', '--hard', 'HEAD'])
1557 return
1558 return
1558 elif self._gitstate() == revision:
1559 elif self._gitstate() == revision:
1559 if overwrite:
1560 if overwrite:
1560 # first reset the index to unmark new files for commit, because
1561 # first reset the index to unmark new files for commit, because
1561 # reset --hard will otherwise throw away files added for commit,
1562 # reset --hard will otherwise throw away files added for commit,
1562 # not just unmark them.
1563 # not just unmark them.
1563 self._gitcommand(['reset', 'HEAD'])
1564 self._gitcommand(['reset', 'HEAD'])
1564 self._gitcommand(['reset', '--hard', 'HEAD'])
1565 self._gitcommand(['reset', '--hard', 'HEAD'])
1565 return
1566 return
1566 branch2rev, rev2branch = self._gitbranchmap()
1567 branch2rev, rev2branch = self._gitbranchmap()
1567
1568
1568 def checkout(args):
1569 def checkout(args):
1569 cmd = ['checkout']
1570 cmd = ['checkout']
1570 if overwrite:
1571 if overwrite:
1571 # first reset the index to unmark new files for commit, because
1572 # first reset the index to unmark new files for commit, because
1572 # the -f option will otherwise throw away files added for
1573 # the -f option will otherwise throw away files added for
1573 # commit, not just unmark them.
1574 # commit, not just unmark them.
1574 self._gitcommand(['reset', 'HEAD'])
1575 self._gitcommand(['reset', 'HEAD'])
1575 cmd.append('-f')
1576 cmd.append('-f')
1576 self._gitcommand(cmd + args)
1577 self._gitcommand(cmd + args)
1577 _sanitize(self.ui, self.wvfs, '.git')
1578 _sanitize(self.ui, self.wvfs, '.git')
1578
1579
1579 def rawcheckout():
1580 def rawcheckout():
1580 # no branch to check out; check the revision out with no branch
1581 # no branch to check out; check the revision out with no branch
1581 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1582 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1582 self._relpath)
1583 self._relpath)
1583 self.ui.warn(_('check out a git branch if you intend '
1584 self.ui.warn(_('check out a git branch if you intend '
1584 'to make changes\n'))
1585 'to make changes\n'))
1585 checkout(['-q', revision])
1586 checkout(['-q', revision])
1586
1587
1587 if revision not in rev2branch:
1588 if revision not in rev2branch:
1588 rawcheckout()
1589 rawcheckout()
1589 return
1590 return
1590 branches = rev2branch[revision]
1591 branches = rev2branch[revision]
1591 firstlocalbranch = None
1592 firstlocalbranch = None
1592 for b in branches:
1593 for b in branches:
1593 if b == 'refs/heads/master':
1594 if b == 'refs/heads/master':
1594 # master trumps all other branches
1595 # master trumps all other branches
1595 checkout(['refs/heads/master'])
1596 checkout(['refs/heads/master'])
1596 return
1597 return
1597 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1598 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1598 firstlocalbranch = b
1599 firstlocalbranch = b
1599 if firstlocalbranch:
1600 if firstlocalbranch:
1600 checkout([firstlocalbranch])
1601 checkout([firstlocalbranch])
1601 return
1602 return
1602
1603
1603 tracking = self._gittracking(branch2rev.keys())
1604 tracking = self._gittracking(branch2rev.keys())
1604 # choose a remote branch already tracked if possible
1605 # choose a remote branch already tracked if possible
1605 remote = branches[0]
1606 remote = branches[0]
1606 if remote not in tracking:
1607 if remote not in tracking:
1607 for b in branches:
1608 for b in branches:
1608 if b in tracking:
1609 if b in tracking:
1609 remote = b
1610 remote = b
1610 break
1611 break
1611
1612
1612 if remote not in tracking:
1613 if remote not in tracking:
1613 # create a new local tracking branch
1614 # create a new local tracking branch
1614 local = remote.split('/', 3)[3]
1615 local = remote.split('/', 3)[3]
1615 checkout(['-b', local, remote])
1616 checkout(['-b', local, remote])
1616 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1617 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1617 # When updating to a tracked remote branch,
1618 # When updating to a tracked remote branch,
1618 # if the local tracking branch is downstream of it,
1619 # if the local tracking branch is downstream of it,
1619 # a normal `git pull` would have performed a "fast-forward merge"
1620 # a normal `git pull` would have performed a "fast-forward merge"
1620 # which is equivalent to updating the local branch to the remote.
1621 # which is equivalent to updating the local branch to the remote.
1621 # Since we are only looking at branching at update, we need to
1622 # Since we are only looking at branching at update, we need to
1622 # detect this situation and perform this action lazily.
1623 # detect this situation and perform this action lazily.
1623 if tracking[remote] != self._gitcurrentbranch():
1624 if tracking[remote] != self._gitcurrentbranch():
1624 checkout([tracking[remote]])
1625 checkout([tracking[remote]])
1625 self._gitcommand(['merge', '--ff', remote])
1626 self._gitcommand(['merge', '--ff', remote])
1626 _sanitize(self.ui, self.wvfs, '.git')
1627 _sanitize(self.ui, self.wvfs, '.git')
1627 else:
1628 else:
1629 # a real merge would be required; just check out the revision
1630 # a real merge would be required; just check out the revision
1629 rawcheckout()
1630 rawcheckout()
1630
1631
1631 @annotatesubrepoerror
1632 @annotatesubrepoerror
1632 def commit(self, text, user, date):
1633 def commit(self, text, user, date):
1633 if self._gitmissing():
1634 if self._gitmissing():
1634 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1635 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1635 cmd = ['commit', '-a', '-m', text]
1636 cmd = ['commit', '-a', '-m', text]
1636 env = os.environ.copy()
1637 env = encoding.environ.copy()
1637 if user:
1638 if user:
1638 cmd += ['--author', user]
1639 cmd += ['--author', user]
1639 if date:
1640 if date:
1640 # git's date parser silently ignores epoch values below 1e9,
1641 # git's date parser silently ignores epoch values below 1e9,
1641 # so convert to ISO 8601
1642 # so convert to ISO 8601
1642 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1643 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1643 '%Y-%m-%dT%H:%M:%S %1%2')
1644 '%Y-%m-%dT%H:%M:%S %1%2')
1644 self._gitcommand(cmd, env=env)
1645 self._gitcommand(cmd, env=env)
1646 # make sure the commit worked; otherwise HEAD might not exist under
1647 # make sure the commit worked; otherwise HEAD might not exist under
1647 # certain circumstances
1648 # certain circumstances
1647 return self._gitstate()
1648 return self._gitstate()
1648
1649
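# Hedged sketch of the date handling in commit(): hg-style dates are turned
# into an ISO 8601 string for GIT_AUTHOR_DATE because git quietly drops raw
# epoch values below 1e9. This uses datetime instead of Mercurial's
# util.datestr, and assumes the offset is given in seconds east of UTC
# (Mercurial's internal sign convention differs).
from datetime import datetime, timedelta, timezone

def git_author_date(unixtime, utcoffset_seconds):
    tz = timezone(timedelta(seconds=utcoffset_seconds))
    return datetime.fromtimestamp(unixtime, tz).strftime('%Y-%m-%dT%H:%M:%S %z')

print(git_author_date(1466426344, 7200))  # 2016-06-20T14:39:04 +0200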
1649 @annotatesubrepoerror
1650 @annotatesubrepoerror
1650 def merge(self, state):
1651 def merge(self, state):
1651 source, revision, kind = state
1652 source, revision, kind = state
1652 self._fetch(source, revision)
1653 self._fetch(source, revision)
1653 base = self._gitcommand(['merge-base', revision, self._state[1]])
1654 base = self._gitcommand(['merge-base', revision, self._state[1]])
1654 self._gitupdatestat()
1655 self._gitupdatestat()
1655 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1656 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1656
1657
1657 def mergefunc():
1658 def mergefunc():
1658 if base == revision:
1659 if base == revision:
1659 self.get(state) # fast forward merge
1660 self.get(state) # fast forward merge
1660 elif base != self._state[1]:
1661 elif base != self._state[1]:
1661 self._gitcommand(['merge', '--no-commit', revision])
1662 self._gitcommand(['merge', '--no-commit', revision])
1662 _sanitize(self.ui, self.wvfs, '.git')
1663 _sanitize(self.ui, self.wvfs, '.git')
1663
1664
1664 if self.dirty():
1665 if self.dirty():
1665 if self._gitstate() != revision:
1666 if self._gitstate() != revision:
1666 dirty = self._gitstate() == self._state[1] or code != 0
1667 dirty = self._gitstate() == self._state[1] or code != 0
1667 if _updateprompt(self.ui, self, dirty,
1668 if _updateprompt(self.ui, self, dirty,
1668 self._state[1][:7], revision[:7]):
1669 self._state[1][:7], revision[:7]):
1669 mergefunc()
1670 mergefunc()
1670 else:
1671 else:
1671 mergefunc()
1672 mergefunc()
1672
1673
1673 @annotatesubrepoerror
1674 @annotatesubrepoerror
1674 def push(self, opts):
1675 def push(self, opts):
1675 force = opts.get('force')
1676 force = opts.get('force')
1676
1677
1677 if not self._state[1]:
1678 if not self._state[1]:
1678 return True
1679 return True
1679 if self._gitmissing():
1680 if self._gitmissing():
1680 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1681 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1681 # if a branch in origin contains the revision, nothing to do
1682 # if a branch in origin contains the revision, nothing to do
1682 branch2rev, rev2branch = self._gitbranchmap()
1683 branch2rev, rev2branch = self._gitbranchmap()
1683 if self._state[1] in rev2branch:
1684 if self._state[1] in rev2branch:
1684 for b in rev2branch[self._state[1]]:
1685 for b in rev2branch[self._state[1]]:
1685 if b.startswith('refs/remotes/origin/'):
1686 if b.startswith('refs/remotes/origin/'):
1686 return True
1687 return True
1687 for b, revision in branch2rev.iteritems():
1688 for b, revision in branch2rev.iteritems():
1688 if b.startswith('refs/remotes/origin/'):
1689 if b.startswith('refs/remotes/origin/'):
1689 if self._gitisancestor(self._state[1], revision):
1690 if self._gitisancestor(self._state[1], revision):
1690 return True
1691 return True
1691 # otherwise, try to push the currently checked out branch
1692 # otherwise, try to push the currently checked out branch
1692 cmd = ['push']
1693 cmd = ['push']
1693 if force:
1694 if force:
1694 cmd.append('--force')
1695 cmd.append('--force')
1695
1696
1696 current = self._gitcurrentbranch()
1697 current = self._gitcurrentbranch()
1697 if current:
1698 if current:
1698 # determine if the current branch is even useful
1699 # determine if the current branch is even useful
1699 if not self._gitisancestor(self._state[1], current):
1700 if not self._gitisancestor(self._state[1], current):
1700 self.ui.warn(_('unrelated git branch checked out '
1701 self.ui.warn(_('unrelated git branch checked out '
1701 'in subrepo %s\n') % self._relpath)
1702 'in subrepo %s\n') % self._relpath)
1702 return False
1703 return False
1703 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1704 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1704 (current.split('/', 2)[2], self._relpath))
1705 (current.split('/', 2)[2], self._relpath))
1705 ret = self._gitdir(cmd + ['origin', current])
1706 ret = self._gitdir(cmd + ['origin', current])
1706 return ret[1] == 0
1707 return ret[1] == 0
1707 else:
1708 else:
1708 self.ui.warn(_('no branch checked out in subrepo %s\n'
1709 self.ui.warn(_('no branch checked out in subrepo %s\n'
1709 'cannot push revision %s\n') %
1710 'cannot push revision %s\n') %
1710 (self._relpath, self._state[1]))
1711 (self._relpath, self._state[1]))
1711 return False
1712 return False
1712
1713
1713 @annotatesubrepoerror
1714 @annotatesubrepoerror
1714 def add(self, ui, match, prefix, explicitonly, **opts):
1715 def add(self, ui, match, prefix, explicitonly, **opts):
1715 if self._gitmissing():
1716 if self._gitmissing():
1716 return []
1717 return []
1717
1718
1718 (modified, added, removed,
1719 (modified, added, removed,
1719 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1720 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1720 clean=True)
1721 clean=True)
1721
1722
1722 tracked = set()
1723 tracked = set()
1723 # dirstate states 'a', 'm' and 'n' warn; 'r' is added again
1724 # dirstate states 'a', 'm' and 'n' warn; 'r' is added again
1724 for l in (modified, added, deleted, clean):
1725 for l in (modified, added, deleted, clean):
1725 tracked.update(l)
1726 tracked.update(l)
1726
1727
1727 # Unknown files not of interest will be rejected by the matcher
1728 # Unknown files not of interest will be rejected by the matcher
1728 files = unknown
1729 files = unknown
1729 files.extend(match.files())
1730 files.extend(match.files())
1730
1731
1731 rejected = []
1732 rejected = []
1732
1733
1733 files = [f for f in sorted(set(files)) if match(f)]
1734 files = [f for f in sorted(set(files)) if match(f)]
1734 for f in files:
1735 for f in files:
1735 exact = match.exact(f)
1736 exact = match.exact(f)
1736 command = ["add"]
1737 command = ["add"]
1737 if exact:
1738 if exact:
1738 command.append("-f") #should be added, even if ignored
1739 command.append("-f") #should be added, even if ignored
1739 if ui.verbose or not exact:
1740 if ui.verbose or not exact:
1740 ui.status(_('adding %s\n') % match.rel(f))
1741 ui.status(_('adding %s\n') % match.rel(f))
1741
1742
1742 if f in tracked: # hg prints 'adding' even if already tracked
1743 if f in tracked: # hg prints 'adding' even if already tracked
1743 if exact:
1744 if exact:
1744 rejected.append(f)
1745 rejected.append(f)
1745 continue
1746 continue
1746 if not opts.get('dry_run'):
1747 if not opts.get('dry_run'):
1747 self._gitcommand(command + [f])
1748 self._gitcommand(command + [f])
1748
1749
1749 for f in rejected:
1750 for f in rejected:
1750 ui.warn(_("%s already tracked!\n") % match.abs(f))
1751 ui.warn(_("%s already tracked!\n") % match.abs(f))
1751
1752
1752 return rejected
1753 return rejected
1753
1754
1754 @annotatesubrepoerror
1755 @annotatesubrepoerror
1755 def remove(self):
1756 def remove(self):
1756 if self._gitmissing():
1757 if self._gitmissing():
1757 return
1758 return
1758 if self.dirty():
1759 if self.dirty():
1759 self.ui.warn(_('not removing repo %s because '
1760 self.ui.warn(_('not removing repo %s because '
1760 'it has changes.\n') % self._relpath)
1761 'it has changes.\n') % self._relpath)
1761 return
1762 return
1762 # we can't fully delete the repository as it may contain
1763 # we can't fully delete the repository as it may contain
1763 # local-only history
1764 # local-only history
1764 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1765 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1765 self._gitcommand(['config', 'core.bare', 'true'])
1766 self._gitcommand(['config', 'core.bare', 'true'])
1766 for f, kind in self.wvfs.readdir():
1767 for f, kind in self.wvfs.readdir():
1767 if f == '.git':
1768 if f == '.git':
1768 continue
1769 continue
1769 if kind == stat.S_IFDIR:
1770 if kind == stat.S_IFDIR:
1770 self.wvfs.rmtree(f)
1771 self.wvfs.rmtree(f)
1771 else:
1772 else:
1772 self.wvfs.unlink(f)
1773 self.wvfs.unlink(f)
1773
1774
1774 def archive(self, archiver, prefix, match=None):
1775 def archive(self, archiver, prefix, match=None):
1775 total = 0
1776 total = 0
1776 source, revision = self._state
1777 source, revision = self._state
1777 if not revision:
1778 if not revision:
1778 return total
1779 return total
1779 self._fetch(source, revision)
1780 self._fetch(source, revision)
1780
1781
1781 # Parse the output of git's native archive command.
1782 # Parse the output of git's native archive command.
1782 # This should be much faster than manually traversing the trees
1783 # This should be much faster than manually traversing the trees
1783 # and objects with many subprocess calls.
1784 # and objects with many subprocess calls.
1784 tarstream = self._gitcommand(['archive', revision], stream=True)
1785 tarstream = self._gitcommand(['archive', revision], stream=True)
1785 tar = tarfile.open(fileobj=tarstream, mode='r|')
1786 tar = tarfile.open(fileobj=tarstream, mode='r|')
1786 relpath = subrelpath(self)
1787 relpath = subrelpath(self)
1787 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1788 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1788 for i, info in enumerate(tar):
1789 for i, info in enumerate(tar):
1789 if info.isdir():
1790 if info.isdir():
1790 continue
1791 continue
1791 if match and not match(info.name):
1792 if match and not match(info.name):
1792 continue
1793 continue
1793 if info.issym():
1794 if info.issym():
1794 data = info.linkname
1795 data = info.linkname
1795 else:
1796 else:
1796 data = tar.extractfile(info).read()
1797 data = tar.extractfile(info).read()
1797 archiver.addfile(prefix + self._path + '/' + info.name,
1798 archiver.addfile(prefix + self._path + '/' + info.name,
1798 info.mode, info.issym(), data)
1799 info.mode, info.issym(), data)
1799 total += 1
1800 total += 1
1800 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1801 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1801 unit=_('files'))
1802 unit=_('files'))
1802 self.ui.progress(_('archiving (%s)') % relpath, None)
1803 self.ui.progress(_('archiving (%s)') % relpath, None)
1803 return total
1804 return total
1804
1805
1805
1806
1806 @annotatesubrepoerror
1807 @annotatesubrepoerror
1807 def cat(self, match, prefix, **opts):
1808 def cat(self, match, prefix, **opts):
1808 rev = self._state[1]
1809 rev = self._state[1]
1809 if match.anypats():
1810 if match.anypats():
1810 return 1 #No support for include/exclude yet
1811 return 1 #No support for include/exclude yet
1811
1812
1812 if not match.files():
1813 if not match.files():
1813 return 1
1814 return 1
1814
1815
1815 for f in match.files():
1816 for f in match.files():
1816 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1817 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1817 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1818 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1818 self._ctx.node(),
1819 self._ctx.node(),
1819 pathname=self.wvfs.reljoin(prefix, f))
1820 pathname=self.wvfs.reljoin(prefix, f))
1820 fp.write(output)
1821 fp.write(output)
1821 fp.close()
1822 fp.close()
1822 return 0
1823 return 0
1823
1824
1824
1825
1825 @annotatesubrepoerror
1826 @annotatesubrepoerror
1826 def status(self, rev2, **opts):
1827 def status(self, rev2, **opts):
1827 rev1 = self._state[1]
1828 rev1 = self._state[1]
1828 if self._gitmissing() or not rev1:
1829 if self._gitmissing() or not rev1:
1829 # if the repo is missing, return no results
1830 # if the repo is missing, return no results
1830 return scmutil.status([], [], [], [], [], [], [])
1831 return scmutil.status([], [], [], [], [], [], [])
1831 modified, added, removed = [], [], []
1832 modified, added, removed = [], [], []
1832 self._gitupdatestat()
1833 self._gitupdatestat()
1833 if rev2:
1834 if rev2:
1834 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1835 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1835 else:
1836 else:
1836 command = ['diff-index', '--no-renames', rev1]
1837 command = ['diff-index', '--no-renames', rev1]
1837 out = self._gitcommand(command)
1838 out = self._gitcommand(command)
1838 for line in out.split('\n'):
1839 for line in out.split('\n'):
1839 tab = line.find('\t')
1840 tab = line.find('\t')
1840 if tab == -1:
1841 if tab == -1:
1841 continue
1842 continue
1842 status, f = line[tab - 1], line[tab + 1:]
1843 status, f = line[tab - 1], line[tab + 1:]
1843 if status == 'M':
1844 if status == 'M':
1844 modified.append(f)
1845 modified.append(f)
1845 elif status == 'A':
1846 elif status == 'A':
1846 added.append(f)
1847 added.append(f)
1847 elif status == 'D':
1848 elif status == 'D':
1848 removed.append(f)
1849 removed.append(f)
1849
1850
1850 deleted, unknown, ignored, clean = [], [], [], []
1851 deleted, unknown, ignored, clean = [], [], [], []
1851
1852
1852 command = ['status', '--porcelain', '-z']
1853 command = ['status', '--porcelain', '-z']
1853 if opts.get('unknown'):
1854 if opts.get('unknown'):
1854 command += ['--untracked-files=all']
1855 command += ['--untracked-files=all']
1855 if opts.get('ignored'):
1856 if opts.get('ignored'):
1856 command += ['--ignored']
1857 command += ['--ignored']
1857 out = self._gitcommand(command)
1858 out = self._gitcommand(command)
1858
1859
1859 changedfiles = set()
1860 changedfiles = set()
1860 changedfiles.update(modified)
1861 changedfiles.update(modified)
1861 changedfiles.update(added)
1862 changedfiles.update(added)
1862 changedfiles.update(removed)
1863 changedfiles.update(removed)
1863 for line in out.split('\0'):
1864 for line in out.split('\0'):
1864 if not line:
1865 if not line:
1865 continue
1866 continue
1866 st = line[0:2]
1867 st = line[0:2]
1867 #moves and copies show 2 files on one line
1868 #moves and copies show 2 files on one line
1868 if line.find('\0') >= 0:
1869 if line.find('\0') >= 0:
1869 filename1, filename2 = line[3:].split('\0')
1870 filename1, filename2 = line[3:].split('\0')
1870 else:
1871 else:
1871 filename1 = line[3:]
1872 filename1 = line[3:]
1872 filename2 = None
1873 filename2 = None
1873
1874
1874 changedfiles.add(filename1)
1875 changedfiles.add(filename1)
1875 if filename2:
1876 if filename2:
1876 changedfiles.add(filename2)
1877 changedfiles.add(filename2)
1877
1878
1878 if st == '??':
1879 if st == '??':
1879 unknown.append(filename1)
1880 unknown.append(filename1)
1880 elif st == '!!':
1881 elif st == '!!':
1881 ignored.append(filename1)
1882 ignored.append(filename1)
1882
1883
1883 if opts.get('clean'):
1884 if opts.get('clean'):
1884 out = self._gitcommand(['ls-files'])
1885 out = self._gitcommand(['ls-files'])
1885 for f in out.split('\n'):
1886 for f in out.split('\n'):
1886 if not f in changedfiles:
1887 if not f in changedfiles:
1887 clean.append(f)
1888 clean.append(f)
1888
1889
1889 return scmutil.status(modified, added, removed, deleted,
1890 return scmutil.status(modified, added, removed, deleted,
1890 unknown, ignored, clean)
1891 unknown, ignored, clean)
1891
1892
1892 @annotatesubrepoerror
1893 @annotatesubrepoerror
1893 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1894 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1894 node1 = self._state[1]
1895 node1 = self._state[1]
1895 cmd = ['diff', '--no-renames']
1896 cmd = ['diff', '--no-renames']
1896 if opts['stat']:
1897 if opts['stat']:
1897 cmd.append('--stat')
1898 cmd.append('--stat')
1898 else:
1899 else:
1899 # for Git, this also implies '-p'
1900 # for Git, this also implies '-p'
1900 cmd.append('-U%d' % diffopts.context)
1901 cmd.append('-U%d' % diffopts.context)
1901
1902
1902 gitprefix = self.wvfs.reljoin(prefix, self._path)
1903 gitprefix = self.wvfs.reljoin(prefix, self._path)
1903
1904
1904 if diffopts.noprefix:
1905 if diffopts.noprefix:
1905 cmd.extend(['--src-prefix=%s/' % gitprefix,
1906 cmd.extend(['--src-prefix=%s/' % gitprefix,
1906 '--dst-prefix=%s/' % gitprefix])
1907 '--dst-prefix=%s/' % gitprefix])
1907 else:
1908 else:
1908 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1909 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1909 '--dst-prefix=b/%s/' % gitprefix])
1910 '--dst-prefix=b/%s/' % gitprefix])
1910
1911
1911 if diffopts.ignorews:
1912 if diffopts.ignorews:
1912 cmd.append('--ignore-all-space')
1913 cmd.append('--ignore-all-space')
1913 if diffopts.ignorewsamount:
1914 if diffopts.ignorewsamount:
1914 cmd.append('--ignore-space-change')
1915 cmd.append('--ignore-space-change')
1915 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1916 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1916 and diffopts.ignoreblanklines:
1917 and diffopts.ignoreblanklines:
1917 cmd.append('--ignore-blank-lines')
1918 cmd.append('--ignore-blank-lines')
1918
1919
1919 cmd.append(node1)
1920 cmd.append(node1)
1920 if node2:
1921 if node2:
1921 cmd.append(node2)
1922 cmd.append(node2)
1922
1923
1923 output = ""
1924 output = ""
1924 if match.always():
1925 if match.always():
1925 output += self._gitcommand(cmd) + '\n'
1926 output += self._gitcommand(cmd) + '\n'
1926 else:
1927 else:
1927 st = self.status(node2)[:3]
1928 st = self.status(node2)[:3]
1928 files = [f for sublist in st for f in sublist]
1929 files = [f for sublist in st for f in sublist]
1929 for f in files:
1930 for f in files:
1930 if match(f):
1931 if match(f):
1931 output += self._gitcommand(cmd + ['--', f]) + '\n'
1932 output += self._gitcommand(cmd + ['--', f]) + '\n'
1932
1933
1933 if output.strip():
1934 if output.strip():
1934 ui.write(output)
1935 ui.write(output)
1935
1936
1936 @annotatesubrepoerror
1937 @annotatesubrepoerror
1937 def revert(self, substate, *pats, **opts):
1938 def revert(self, substate, *pats, **opts):
1938 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1939 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1939 if not opts.get('no_backup'):
1940 if not opts.get('no_backup'):
1940 status = self.status(None)
1941 status = self.status(None)
1941 names = status.modified
1942 names = status.modified
1942 for name in names:
1943 for name in names:
1943 bakname = scmutil.origpath(self.ui, self._subparent, name)
1944 bakname = scmutil.origpath(self.ui, self._subparent, name)
1944 self.ui.note(_('saving current version of %s as %s\n') %
1945 self.ui.note(_('saving current version of %s as %s\n') %
1945 (name, bakname))
1946 (name, bakname))
1946 self.wvfs.rename(name, bakname)
1947 self.wvfs.rename(name, bakname)
1947
1948
1948 if not opts.get('dry_run'):
1949 if not opts.get('dry_run'):
1949 self.get(substate, overwrite=True)
1950 self.get(substate, overwrite=True)
1950 return []
1951 return []
1951
1952
1952 def shortid(self, revid):
1953 def shortid(self, revid):
1953 return revid[:7]
1954 return revid[:7]
1954
1955
1955 types = {
1956 types = {
1956 'hg': hgsubrepo,
1957 'hg': hgsubrepo,
1957 'svn': svnsubrepo,
1958 'svn': svnsubrepo,
1958 'git': gitsubrepo,
1959 'git': gitsubrepo,
1959 }
1960 }
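
A note on the output format consumed by the status() method above: "git status --porcelain -z" emits NUL-separated entries, each beginning with a two-character status code ('??' for untracked, '!!' for ignored, letters such as M/A/D for tracked changes), a space, and the path; renames and copies append the original path as an extra NUL-separated field. The standalone sketch below only illustrates that format and is not Mercurial's code: it uses plain subprocess instead of the _gitcommand helper, and every name in it is invented for the example.

import subprocess

def porcelain_status(path='.'):
    # illustrative parser for "git status --porcelain -z" output
    out = subprocess.check_output(
        ['git', 'status', '--porcelain', '-z'], cwd=path)
    fields = out.decode('utf-8', 'replace').split('\0')
    results, i = [], 0
    while i < len(fields):
        entry = fields[i]
        i += 1
        if not entry:
            continue
        st, name = entry[:2], entry[3:]
        orig = None
        if 'R' in st or 'C' in st:  # renames/copies carry the original
            orig = fields[i]        # path as the next field
            i += 1
        results.append((st, name, orig))
    return results

The method above only routes the '??' and '!!' codes into its unknown and ignored buckets, and folds the reported names into changedfiles for the later 'clean' computation.
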
@@ -1,222 +1,223 @@
# worker.py - master-slave parallelism support
#
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import os
import signal
import sys

from .i18n import _
from . import (
+    encoding,
    error,
    scmutil,
    util,
)

def countcpus():
    '''try to count the number of CPUs on the system'''

    # posix
    try:
        n = int(os.sysconf('SC_NPROCESSORS_ONLN'))
        if n > 0:
            return n
    except (AttributeError, ValueError):
        pass

    # windows
    try:
-        n = int(os.environ['NUMBER_OF_PROCESSORS'])
+        n = int(encoding.environ['NUMBER_OF_PROCESSORS'])
        if n > 0:
            return n
    except (KeyError, ValueError):
        pass

    return 1

def _numworkers(ui):
    s = ui.config('worker', 'numcpus')
    if s:
        try:
            n = int(s)
            if n >= 1:
                return n
        except ValueError:
            raise error.Abort(_('number of cpus must be an integer'))
    return min(max(countcpus(), 4), 32)

if os.name == 'posix':
    _startupcost = 0.01
else:
    _startupcost = 1e30

def worthwhile(ui, costperop, nops):
    '''try to determine whether the benefit of multiple processes can
    outweigh the cost of starting them'''
    linear = costperop * nops
    workers = _numworkers(ui)
    benefit = linear - (_startupcost * workers + linear / workers)
    return benefit >= 0.15
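
# A worked example of the heuristic above, with numbers chosen purely for
# illustration (they do not appear in the original file): on posix,
# _startupcost is 0.01, so costperop = 0.001 and nops = 10000 give
# linear = 10.0; with 4 workers,
# benefit = 10.0 - (0.01 * 4 + 10.0 / 4) = 7.46, comfortably above the 0.15
# threshold, so the work is farmed out to worker processes.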

def worker(ui, costperarg, func, staticargs, args):
    '''run a function, possibly in parallel in multiple worker
    processes.

    returns a progress iterator

    costperarg - cost of a single task

    func - function to run

    staticargs - arguments to pass to every invocation of the function

    args - arguments to split into chunks, to pass to individual
    workers
    '''
    if worthwhile(ui, costperarg, len(args)):
        return _platformworker(ui, func, staticargs, args)
    return func(*staticargs + (args,))

def _posixworker(ui, func, staticargs, args):
    rfd, wfd = os.pipe()
    workers = _numworkers(ui)
    oldhandler = signal.getsignal(signal.SIGINT)
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    pids, problem = set(), [0]
    def killworkers():
        # unregister SIGCHLD handler as all children will be killed. This
        # function shouldn't be interrupted by another SIGCHLD; otherwise pids
        # could be updated while iterating, which would cause inconsistency.
        signal.signal(signal.SIGCHLD, oldchldhandler)
        # if one worker bails, there's no good reason to wait for the rest
        for p in pids:
            try:
                os.kill(p, signal.SIGTERM)
            except OSError as err:
                if err.errno != errno.ESRCH:
                    raise
    def waitforworkers(blocking=True):
        for pid in pids.copy():
            p = st = 0
            while True:
                try:
                    p, st = os.waitpid(pid, (0 if blocking else os.WNOHANG))
                    break
                except OSError as e:
                    if e.errno == errno.EINTR:
                        continue
                    elif e.errno == errno.ECHILD:
                        # child would already be reaped, but pids yet been
                        # updated (maybe interrupted just after waitpid)
                        pids.discard(pid)
                        break
                    else:
                        raise
            if p:
                pids.discard(p)
                st = _exitstatus(st)
                if st and not problem[0]:
                    problem[0] = st
    def sigchldhandler(signum, frame):
        waitforworkers(blocking=False)
        if problem[0]:
            killworkers()
    oldchldhandler = signal.signal(signal.SIGCHLD, sigchldhandler)
    for pargs in partition(args, workers):
        pid = os.fork()
        if pid == 0:
            signal.signal(signal.SIGINT, oldhandler)
            signal.signal(signal.SIGCHLD, oldchldhandler)

            def workerfunc():
                os.close(rfd)
                for i, item in func(*(staticargs + (pargs,))):
                    os.write(wfd, '%d %s\n' % (i, item))

            # make sure we use os._exit in all code paths. otherwise the worker
            # may do some clean-ups which could cause surprises like deadlock.
            # see sshpeer.cleanup for example.
            try:
                scmutil.callcatch(ui, workerfunc)
            except KeyboardInterrupt:
                os._exit(255)
            except: # never return, therefore no re-raises
                try:
                    ui.traceback()
                finally:
                    os._exit(255)
            else:
                os._exit(0)
        pids.add(pid)
    os.close(wfd)
    fp = os.fdopen(rfd, 'rb', 0)
    def cleanup():
        signal.signal(signal.SIGINT, oldhandler)
        waitforworkers()
        signal.signal(signal.SIGCHLD, oldchldhandler)
        status = problem[0]
        if status:
            if status < 0:
                os.kill(os.getpid(), -status)
            sys.exit(status)
    try:
        for line in util.iterfile(fp):
            l = line.split(' ', 1)
            yield int(l[0]), l[1][:-1]
    except: # re-raises
        killworkers()
        cleanup()
        raise
    cleanup()

def _posixexitstatus(code):
    '''convert a posix exit status into the same form returned by
    os.spawnv

    returns None if the process was stopped instead of exiting'''
    if os.WIFEXITED(code):
        return os.WEXITSTATUS(code)
    elif os.WIFSIGNALED(code):
        return -os.WTERMSIG(code)

if os.name != 'nt':
    _platformworker = _posixworker
    _exitstatus = _posixexitstatus

def partition(lst, nslices):
    '''partition a list into N slices of roughly equal size

    The current strategy takes every Nth element from the input. If
    we ever write workers that need to preserve grouping in input
    we should consider allowing callers to specify a partition strategy.

    mpm is not a fan of this partitioning strategy when files are involved.
    In his words:

        Single-threaded Mercurial makes a point of creating and visiting
        files in a fixed order (alphabetical). When creating files in order,
        a typical filesystem is likely to allocate them on nearby regions on
        disk. Thus, when revisiting in the same order, locality is maximized
        and various forms of OS and disk-level caching and read-ahead get a
        chance to work.

        This effect can be quite significant on spinning disks. I discovered it
        circa Mercurial v0.4 when revlogs were named by hashes of filenames.
        Tarring a repo and copying it to another disk effectively randomized
        the revlog ordering on disk by sorting the revlogs by hash and suddenly
        performance of my kernel checkout benchmark dropped by ~10x because the
        "working set" of sectors visited no longer fit in the drive's cache and
        the workload switched from streaming to random I/O.

        What we should really be doing is have workers read filenames from a
        ordered queue. This preserves locality and also keeps any worker from
        getting more than one file out of balance.
    '''
    for i in range(nslices):
        yield lst[i::nslices]
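
To make the "every Nth element" strategy described in partition()'s docstring concrete, here is a small standalone demonstration; it duplicates the two-line body so it can run outside Mercurial, and the surrounding names are illustrative:

def partition(lst, nslices):
    # take every nslices-th element, starting at offset i
    for i in range(nslices):
        yield lst[i::nslices]

chunks = [list(c) for c in partition(list(range(10)), 3)]
print(chunks)   # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]

Each slice walks the input with a stride of nslices, so an alphabetically sorted list of filenames is dealt out round-robin across workers rather than in contiguous runs, which is exactly the locality concern raised in the docstring.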