##// END OF EJS Templates
cleanup: replace naked excepts with more specific ones
Brodie Rao -
r16688:cfb66829 default
parent child Browse files
Show More
@@ -1,373 +1,373 b''
1 #
1 #
2 # This is an experimental py3k-enabled mercurial setup script.
2 # This is an experimental py3k-enabled mercurial setup script.
3 #
3 #
4 # 'python setup.py install', or
4 # 'python setup.py install', or
5 # 'python setup.py --help' for more options
5 # 'python setup.py --help' for more options
6
6
7 from distutils.command.build_py import build_py_2to3
7 from distutils.command.build_py import build_py_2to3
8 from lib2to3.refactor import get_fixers_from_package as getfixers
8 from lib2to3.refactor import get_fixers_from_package as getfixers
9
9
10 import sys
10 import sys
11 if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
11 if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
12 raise SystemExit("Mercurial requires Python 2.4 or later.")
12 raise SystemExit("Mercurial requires Python 2.4 or later.")
13
13
14 if sys.version_info[0] >= 3:
14 if sys.version_info[0] >= 3:
15 def b(s):
15 def b(s):
16 '''A helper function to emulate 2.6+ bytes literals using string
16 '''A helper function to emulate 2.6+ bytes literals using string
17 literals.'''
17 literals.'''
18 return s.encode('latin1')
18 return s.encode('latin1')
19 else:
19 else:
20 def b(s):
20 def b(s):
21 '''A helper function to emulate 2.6+ bytes literals using string
21 '''A helper function to emulate 2.6+ bytes literals using string
22 literals.'''
22 literals.'''
23 return s
23 return s
24
24
25 # Solaris Python packaging brain damage
25 # Solaris Python packaging brain damage
26 try:
26 try:
27 import hashlib
27 import hashlib
28 sha = hashlib.sha1()
28 sha = hashlib.sha1()
29 except:
29 except ImportError:
30 try:
30 try:
31 import sha
31 import sha
32 except:
32 except ImportError:
33 raise SystemExit(
33 raise SystemExit(
34 "Couldn't import standard hashlib (incomplete Python install).")
34 "Couldn't import standard hashlib (incomplete Python install).")
35
35
36 try:
36 try:
37 import zlib
37 import zlib
38 except:
38 except ImportError:
39 raise SystemExit(
39 raise SystemExit(
40 "Couldn't import standard zlib (incomplete Python install).")
40 "Couldn't import standard zlib (incomplete Python install).")
41
41
42 try:
42 try:
43 import bz2
43 import bz2
44 except:
44 except ImportError:
45 raise SystemExit(
45 raise SystemExit(
46 "Couldn't import standard bz2 (incomplete Python install).")
46 "Couldn't import standard bz2 (incomplete Python install).")
47
47
48 import os, subprocess, time
48 import os, subprocess, time
49 import shutil
49 import shutil
50 import tempfile
50 import tempfile
51 from distutils import log
51 from distutils import log
52 from distutils.core import setup, Extension
52 from distutils.core import setup, Extension
53 from distutils.dist import Distribution
53 from distutils.dist import Distribution
54 from distutils.command.build import build
54 from distutils.command.build import build
55 from distutils.command.build_ext import build_ext
55 from distutils.command.build_ext import build_ext
56 from distutils.command.build_py import build_py
56 from distutils.command.build_py import build_py
57 from distutils.spawn import spawn, find_executable
57 from distutils.spawn import spawn, find_executable
58 from distutils.ccompiler import new_compiler
58 from distutils.ccompiler import new_compiler
59 from distutils.errors import CCompilerError
59 from distutils.errors import CCompilerError
60
60
61 scripts = ['hg']
61 scripts = ['hg']
62 if os.name == 'nt':
62 if os.name == 'nt':
63 scripts.append('contrib/win32/hg.bat')
63 scripts.append('contrib/win32/hg.bat')
64
64
65 # simplified version of distutils.ccompiler.CCompiler.has_function
65 # simplified version of distutils.ccompiler.CCompiler.has_function
66 # that actually removes its temporary files.
66 # that actually removes its temporary files.
67 def hasfunction(cc, funcname):
67 def hasfunction(cc, funcname):
68 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
68 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
69 devnull = oldstderr = None
69 devnull = oldstderr = None
70 try:
70 try:
71 try:
71 try:
72 fname = os.path.join(tmpdir, 'funcname.c')
72 fname = os.path.join(tmpdir, 'funcname.c')
73 f = open(fname, 'w')
73 f = open(fname, 'w')
74 f.write('int main(void) {\n')
74 f.write('int main(void) {\n')
75 f.write(' %s();\n' % funcname)
75 f.write(' %s();\n' % funcname)
76 f.write('}\n')
76 f.write('}\n')
77 f.close()
77 f.close()
78 # Redirect stderr to /dev/null to hide any error messages
78 # Redirect stderr to /dev/null to hide any error messages
79 # from the compiler.
79 # from the compiler.
80 # This will have to be changed if we ever have to check
80 # This will have to be changed if we ever have to check
81 # for a function on Windows.
81 # for a function on Windows.
82 devnull = open('/dev/null', 'w')
82 devnull = open('/dev/null', 'w')
83 oldstderr = os.dup(sys.stderr.fileno())
83 oldstderr = os.dup(sys.stderr.fileno())
84 os.dup2(devnull.fileno(), sys.stderr.fileno())
84 os.dup2(devnull.fileno(), sys.stderr.fileno())
85 objects = cc.compile([fname], output_dir=tmpdir)
85 objects = cc.compile([fname], output_dir=tmpdir)
86 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
86 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
87 except:
87 except:
88 return False
88 return False
89 return True
89 return True
90 finally:
90 finally:
91 if oldstderr is not None:
91 if oldstderr is not None:
92 os.dup2(oldstderr, sys.stderr.fileno())
92 os.dup2(oldstderr, sys.stderr.fileno())
93 if devnull is not None:
93 if devnull is not None:
94 devnull.close()
94 devnull.close()
95 shutil.rmtree(tmpdir)
95 shutil.rmtree(tmpdir)
96
96
97 # py2exe needs to be installed to work
97 # py2exe needs to be installed to work
98 try:
98 try:
99 import py2exe
99 import py2exe
100 py2exeloaded = True
100 py2exeloaded = True
101
101
102 # Help py2exe to find win32com.shell
102 # Help py2exe to find win32com.shell
103 try:
103 try:
104 import modulefinder
104 import modulefinder
105 import win32com
105 import win32com
106 for p in win32com.__path__[1:]: # Take the path to win32comext
106 for p in win32com.__path__[1:]: # Take the path to win32comext
107 modulefinder.AddPackagePath("win32com", p)
107 modulefinder.AddPackagePath("win32com", p)
108 pn = "win32com.shell"
108 pn = "win32com.shell"
109 __import__(pn)
109 __import__(pn)
110 m = sys.modules[pn]
110 m = sys.modules[pn]
111 for p in m.__path__[1:]:
111 for p in m.__path__[1:]:
112 modulefinder.AddPackagePath(pn, p)
112 modulefinder.AddPackagePath(pn, p)
113 except ImportError:
113 except ImportError:
114 pass
114 pass
115
115
116 except ImportError:
116 except ImportError:
117 py2exeloaded = False
117 py2exeloaded = False
118 pass
118 pass
119
119
120 def runcmd(cmd, env):
120 def runcmd(cmd, env):
121 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
121 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
122 stderr=subprocess.PIPE, env=env)
122 stderr=subprocess.PIPE, env=env)
123 out, err = p.communicate()
123 out, err = p.communicate()
124 # If root is executing setup.py, but the repository is owned by
124 # If root is executing setup.py, but the repository is owned by
125 # another user (as in "sudo python setup.py install") we will get
125 # another user (as in "sudo python setup.py install") we will get
126 # trust warnings since the .hg/hgrc file is untrusted. That is
126 # trust warnings since the .hg/hgrc file is untrusted. That is
127 # fine, we don't want to load it anyway. Python may warn about
127 # fine, we don't want to load it anyway. Python may warn about
128 # a missing __init__.py in mercurial/locale, we also ignore that.
128 # a missing __init__.py in mercurial/locale, we also ignore that.
129 err = [e for e in err.splitlines()
129 err = [e for e in err.splitlines()
130 if not e.startswith(b('Not trusting file')) \
130 if not e.startswith(b('Not trusting file')) \
131 and not e.startswith(b('warning: Not importing'))]
131 and not e.startswith(b('warning: Not importing'))]
132 if err:
132 if err:
133 return ''
133 return ''
134 return out
134 return out
135
135
136 version = ''
136 version = ''
137
137
138 if os.path.isdir('.hg'):
138 if os.path.isdir('.hg'):
139 # Execute hg out of this directory with a custom environment which
139 # Execute hg out of this directory with a custom environment which
140 # includes the pure Python modules in mercurial/pure. We also take
140 # includes the pure Python modules in mercurial/pure. We also take
141 # care to not use any hgrc files and do no localization.
141 # care to not use any hgrc files and do no localization.
142 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
142 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
143 env = {'PYTHONPATH': os.pathsep.join(pypath),
143 env = {'PYTHONPATH': os.pathsep.join(pypath),
144 'HGRCPATH': '',
144 'HGRCPATH': '',
145 'LANGUAGE': 'C'}
145 'LANGUAGE': 'C'}
146 if 'LD_LIBRARY_PATH' in os.environ:
146 if 'LD_LIBRARY_PATH' in os.environ:
147 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
147 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
148 if 'SystemRoot' in os.environ:
148 if 'SystemRoot' in os.environ:
149 # Copy SystemRoot into the custom environment for Python 2.6
149 # Copy SystemRoot into the custom environment for Python 2.6
150 # under Windows. Otherwise, the subprocess will fail with
150 # under Windows. Otherwise, the subprocess will fail with
151 # error 0xc0150004. See: http://bugs.python.org/issue3440
151 # error 0xc0150004. See: http://bugs.python.org/issue3440
152 env['SystemRoot'] = os.environ['SystemRoot']
152 env['SystemRoot'] = os.environ['SystemRoot']
153 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
153 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
154 l = runcmd(cmd, env).split()
154 l = runcmd(cmd, env).split()
155 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
155 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
156 l.pop()
156 l.pop()
157 if len(l) > 1: # tag found
157 if len(l) > 1: # tag found
158 version = l[-1]
158 version = l[-1]
159 if l[0].endswith('+'): # propagate the dirty status to the tag
159 if l[0].endswith('+'): # propagate the dirty status to the tag
160 version += '+'
160 version += '+'
161 elif len(l) == 1: # no tag found
161 elif len(l) == 1: # no tag found
162 cmd = [sys.executable, 'hg', 'parents', '--template',
162 cmd = [sys.executable, 'hg', 'parents', '--template',
163 '{latesttag}+{latesttagdistance}-']
163 '{latesttag}+{latesttagdistance}-']
164 version = runcmd(cmd, env) + l[0]
164 version = runcmd(cmd, env) + l[0]
165 if version.endswith('+'):
165 if version.endswith('+'):
166 version += time.strftime('%Y%m%d')
166 version += time.strftime('%Y%m%d')
167 elif os.path.exists('.hg_archival.txt'):
167 elif os.path.exists('.hg_archival.txt'):
168 kw = dict([[t.strip() for t in l.split(':', 1)]
168 kw = dict([[t.strip() for t in l.split(':', 1)]
169 for l in open('.hg_archival.txt')])
169 for l in open('.hg_archival.txt')])
170 if 'tag' in kw:
170 if 'tag' in kw:
171 version = kw['tag']
171 version = kw['tag']
172 elif 'latesttag' in kw:
172 elif 'latesttag' in kw:
173 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
173 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
174 else:
174 else:
175 version = kw.get('node', '')[:12]
175 version = kw.get('node', '')[:12]
176
176
177 if version:
177 if version:
178 f = open("mercurial/__version__.py", "w")
178 f = open("mercurial/__version__.py", "w")
179 f.write('# this file is autogenerated by setup.py\n')
179 f.write('# this file is autogenerated by setup.py\n')
180 f.write('version = "%s"\n' % version)
180 f.write('version = "%s"\n' % version)
181 f.close()
181 f.close()
182
182
183
183
184 try:
184 try:
185 from mercurial import __version__
185 from mercurial import __version__
186 version = __version__.version
186 version = __version__.version
187 except ImportError:
187 except ImportError:
188 version = 'unknown'
188 version = 'unknown'
189
189
190 class hgbuildmo(build):
190 class hgbuildmo(build):
191
191
192 description = "build translations (.mo files)"
192 description = "build translations (.mo files)"
193
193
194 def run(self):
194 def run(self):
195 if not find_executable('msgfmt'):
195 if not find_executable('msgfmt'):
196 self.warn("could not find msgfmt executable, no translations "
196 self.warn("could not find msgfmt executable, no translations "
197 "will be built")
197 "will be built")
198 return
198 return
199
199
200 podir = 'i18n'
200 podir = 'i18n'
201 if not os.path.isdir(podir):
201 if not os.path.isdir(podir):
202 self.warn("could not find %s/ directory" % podir)
202 self.warn("could not find %s/ directory" % podir)
203 return
203 return
204
204
205 join = os.path.join
205 join = os.path.join
206 for po in os.listdir(podir):
206 for po in os.listdir(podir):
207 if not po.endswith('.po'):
207 if not po.endswith('.po'):
208 continue
208 continue
209 pofile = join(podir, po)
209 pofile = join(podir, po)
210 modir = join('locale', po[:-3], 'LC_MESSAGES')
210 modir = join('locale', po[:-3], 'LC_MESSAGES')
211 mofile = join(modir, 'hg.mo')
211 mofile = join(modir, 'hg.mo')
212 mobuildfile = join('mercurial', mofile)
212 mobuildfile = join('mercurial', mofile)
213 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
213 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
214 if sys.platform != 'sunos5':
214 if sys.platform != 'sunos5':
215 # msgfmt on Solaris does not know about -c
215 # msgfmt on Solaris does not know about -c
216 cmd.append('-c')
216 cmd.append('-c')
217 self.mkpath(join('mercurial', modir))
217 self.mkpath(join('mercurial', modir))
218 self.make_file([pofile], mobuildfile, spawn, (cmd,))
218 self.make_file([pofile], mobuildfile, spawn, (cmd,))
219
219
220 # Insert hgbuildmo first so that files in mercurial/locale/ are found
220 # Insert hgbuildmo first so that files in mercurial/locale/ are found
221 # when build_py is run next.
221 # when build_py is run next.
222 build.sub_commands.insert(0, ('build_mo', None))
222 build.sub_commands.insert(0, ('build_mo', None))
223 # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
223 # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
224 # build_py), it will not find osutil & friends, thinking that those modules are
224 # build_py), it will not find osutil & friends, thinking that those modules are
225 # global and, consequently, making a mess, now that all module imports are
225 # global and, consequently, making a mess, now that all module imports are
226 # global.
226 # global.
227 build.sub_commands.insert(1, ('build_ext', None))
227 build.sub_commands.insert(1, ('build_ext', None))
228
228
229 Distribution.pure = 0
229 Distribution.pure = 0
230 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
230 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
231 "code instead of C extensions"))
231 "code instead of C extensions"))
232
232
233 class hgbuildext(build_ext):
233 class hgbuildext(build_ext):
234
234
235 def build_extension(self, ext):
235 def build_extension(self, ext):
236 try:
236 try:
237 build_ext.build_extension(self, ext)
237 build_ext.build_extension(self, ext)
238 except CCompilerError:
238 except CCompilerError:
239 if getattr(ext, 'optional', False):
239 if getattr(ext, 'optional', False):
240 raise
240 raise
241 log.warn("Failed to build optional extension '%s' (skipping)",
241 log.warn("Failed to build optional extension '%s' (skipping)",
242 ext.name)
242 ext.name)
243
243
244 class hgbuildpy(build_py_2to3):
244 class hgbuildpy(build_py_2to3):
245 fixer_names = sorted(set(getfixers("lib2to3.fixes") +
245 fixer_names = sorted(set(getfixers("lib2to3.fixes") +
246 getfixers("hgfixes")))
246 getfixers("hgfixes")))
247
247
248 def finalize_options(self):
248 def finalize_options(self):
249 build_py.finalize_options(self)
249 build_py.finalize_options(self)
250
250
251 if self.distribution.pure:
251 if self.distribution.pure:
252 if self.py_modules is None:
252 if self.py_modules is None:
253 self.py_modules = []
253 self.py_modules = []
254 for ext in self.distribution.ext_modules:
254 for ext in self.distribution.ext_modules:
255 if ext.name.startswith("mercurial."):
255 if ext.name.startswith("mercurial."):
256 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
256 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
257 self.distribution.ext_modules = []
257 self.distribution.ext_modules = []
258
258
259 def find_modules(self):
259 def find_modules(self):
260 modules = build_py.find_modules(self)
260 modules = build_py.find_modules(self)
261 for module in modules:
261 for module in modules:
262 if module[0] == "mercurial.pure":
262 if module[0] == "mercurial.pure":
263 if module[1] != "__init__":
263 if module[1] != "__init__":
264 yield ("mercurial", module[1], module[2])
264 yield ("mercurial", module[1], module[2])
265 else:
265 else:
266 yield module
266 yield module
267
267
268 def run(self):
268 def run(self):
269 # In the build_py_2to3 class, self.updated_files = [], but I couldn't
269 # In the build_py_2to3 class, self.updated_files = [], but I couldn't
270 # see when that variable was updated to point to the updated files, as
270 # see when that variable was updated to point to the updated files, as
271 # its names suggests. Thus, I decided to just find_all_modules and feed
271 # its names suggests. Thus, I decided to just find_all_modules and feed
272 # them to 2to3. Unfortunately, subsequent calls to setup3k.py will
272 # them to 2to3. Unfortunately, subsequent calls to setup3k.py will
273 # incur in 2to3 analysis overhead.
273 # incur in 2to3 analysis overhead.
274 self.updated_files = [i[2] for i in self.find_all_modules()]
274 self.updated_files = [i[2] for i in self.find_all_modules()]
275
275
276 # Base class code
276 # Base class code
277 if self.py_modules:
277 if self.py_modules:
278 self.build_modules()
278 self.build_modules()
279 if self.packages:
279 if self.packages:
280 self.build_packages()
280 self.build_packages()
281 self.build_package_data()
281 self.build_package_data()
282
282
283 # 2to3
283 # 2to3
284 self.run_2to3(self.updated_files)
284 self.run_2to3(self.updated_files)
285
285
286 # Remaining base class code
286 # Remaining base class code
287 self.byte_compile(self.get_outputs(include_bytecode=0))
287 self.byte_compile(self.get_outputs(include_bytecode=0))
288
288
289 cmdclass = {'build_mo': hgbuildmo,
289 cmdclass = {'build_mo': hgbuildmo,
290 'build_ext': hgbuildext,
290 'build_ext': hgbuildext,
291 'build_py': hgbuildpy}
291 'build_py': hgbuildpy}
292
292
293 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
293 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
294 'hgext.highlight', 'hgext.zeroconf']
294 'hgext.highlight', 'hgext.zeroconf']
295
295
296 pymodules = []
296 pymodules = []
297
297
298 extmodules = [
298 extmodules = [
299 Extension('mercurial.base85', ['mercurial/base85.c']),
299 Extension('mercurial.base85', ['mercurial/base85.c']),
300 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
300 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
301 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
301 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
302 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
302 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
303 Extension('mercurial.parsers', ['mercurial/parsers.c']),
303 Extension('mercurial.parsers', ['mercurial/parsers.c']),
304 ]
304 ]
305
305
306 # disable osutil.c under windows + python 2.4 (issue1364)
306 # disable osutil.c under windows + python 2.4 (issue1364)
307 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
307 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
308 pymodules.append('mercurial.pure.osutil')
308 pymodules.append('mercurial.pure.osutil')
309 else:
309 else:
310 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
310 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
311
311
312 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
312 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
313 # The inotify extension is only usable with Linux 2.6 kernels.
313 # The inotify extension is only usable with Linux 2.6 kernels.
314 # You also need a reasonably recent C library.
314 # You also need a reasonably recent C library.
315 # In any case, if it fails to build the error will be skipped ('optional').
315 # In any case, if it fails to build the error will be skipped ('optional').
316 cc = new_compiler()
316 cc = new_compiler()
317 if hasfunction(cc, 'inotify_add_watch'):
317 if hasfunction(cc, 'inotify_add_watch'):
318 inotify = Extension('hgext.inotify.linux._inotify',
318 inotify = Extension('hgext.inotify.linux._inotify',
319 ['hgext/inotify/linux/_inotify.c'],
319 ['hgext/inotify/linux/_inotify.c'],
320 ['mercurial'])
320 ['mercurial'])
321 inotify.optional = True
321 inotify.optional = True
322 extmodules.append(inotify)
322 extmodules.append(inotify)
323 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
323 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
324
324
325 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
325 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
326 'help/*.txt']}
326 'help/*.txt']}
327
327
328 def ordinarypath(p):
328 def ordinarypath(p):
329 return p and p[0] != '.' and p[-1] != '~'
329 return p and p[0] != '.' and p[-1] != '~'
330
330
331 for root in ('templates',):
331 for root in ('templates',):
332 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
332 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
333 curdir = curdir.split(os.sep, 1)[1]
333 curdir = curdir.split(os.sep, 1)[1]
334 dirs[:] = filter(ordinarypath, dirs)
334 dirs[:] = filter(ordinarypath, dirs)
335 for f in filter(ordinarypath, files):
335 for f in filter(ordinarypath, files):
336 f = os.path.join(curdir, f)
336 f = os.path.join(curdir, f)
337 packagedata['mercurial'].append(f)
337 packagedata['mercurial'].append(f)
338
338
339 datafiles = []
339 datafiles = []
340 setupversion = version
340 setupversion = version
341 extra = {}
341 extra = {}
342
342
343 if py2exeloaded:
343 if py2exeloaded:
344 extra['console'] = [
344 extra['console'] = [
345 {'script':'hg',
345 {'script':'hg',
346 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
346 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
347 'product_version':version}]
347 'product_version':version}]
348
348
349 if os.name == 'nt':
349 if os.name == 'nt':
350 # Windows binary file versions for exe/dll files must have the
350 # Windows binary file versions for exe/dll files must have the
351 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
351 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
352 setupversion = version.split('+', 1)[0]
352 setupversion = version.split('+', 1)[0]
353
353
354 setup(name='mercurial',
354 setup(name='mercurial',
355 version=setupversion,
355 version=setupversion,
356 author='Matt Mackall',
356 author='Matt Mackall',
357 author_email='mpm@selenic.com',
357 author_email='mpm@selenic.com',
358 url='http://mercurial.selenic.com/',
358 url='http://mercurial.selenic.com/',
359 description='Scalable distributed SCM',
359 description='Scalable distributed SCM',
360 license='GNU GPLv2+',
360 license='GNU GPLv2+',
361 scripts=scripts,
361 scripts=scripts,
362 packages=packages,
362 packages=packages,
363 py_modules=pymodules,
363 py_modules=pymodules,
364 ext_modules=extmodules,
364 ext_modules=extmodules,
365 data_files=datafiles,
365 data_files=datafiles,
366 package_data=packagedata,
366 package_data=packagedata,
367 cmdclass=cmdclass,
367 cmdclass=cmdclass,
368 options=dict(py2exe=dict(packages=['hgext', 'email']),
368 options=dict(py2exe=dict(packages=['hgext', 'email']),
369 bdist_mpkg=dict(zipdist=True,
369 bdist_mpkg=dict(zipdist=True,
370 license='COPYING',
370 license='COPYING',
371 readme='contrib/macosx/Readme.html',
371 readme='contrib/macosx/Readme.html',
372 welcome='contrib/macosx/Welcome.html')),
372 welcome='contrib/macosx/Welcome.html')),
373 **extra)
373 **extra)
@@ -1,445 +1,445 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64, errno
8 import base64, errno
9 import os
9 import os
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 propertycache = util.propertycache
14 propertycache = util.propertycache
15
15
16 def encodeargs(args):
16 def encodeargs(args):
17 def encodearg(s):
17 def encodearg(s):
18 lines = base64.encodestring(s)
18 lines = base64.encodestring(s)
19 lines = [l.splitlines()[0] for l in lines]
19 lines = [l.splitlines()[0] for l in lines]
20 return ''.join(lines)
20 return ''.join(lines)
21
21
22 s = pickle.dumps(args)
22 s = pickle.dumps(args)
23 return encodearg(s)
23 return encodearg(s)
24
24
25 def decodeargs(s):
25 def decodeargs(s):
26 s = base64.decodestring(s)
26 s = base64.decodestring(s)
27 return pickle.loads(s)
27 return pickle.loads(s)
28
28
29 class MissingTool(Exception):
29 class MissingTool(Exception):
30 pass
30 pass
31
31
32 def checktool(exe, name=None, abort=True):
32 def checktool(exe, name=None, abort=True):
33 name = name or exe
33 name = name or exe
34 if not util.findexe(exe):
34 if not util.findexe(exe):
35 exc = abort and util.Abort or MissingTool
35 exc = abort and util.Abort or MissingTool
36 raise exc(_('cannot find required "%s" tool') % name)
36 raise exc(_('cannot find required "%s" tool') % name)
37
37
38 class NoRepo(Exception):
38 class NoRepo(Exception):
39 pass
39 pass
40
40
41 SKIPREV = 'SKIP'
41 SKIPREV = 'SKIP'
42
42
43 class commit(object):
43 class commit(object):
44 def __init__(self, author, date, desc, parents, branch=None, rev=None,
44 def __init__(self, author, date, desc, parents, branch=None, rev=None,
45 extra={}, sortkey=None):
45 extra={}, sortkey=None):
46 self.author = author or 'unknown'
46 self.author = author or 'unknown'
47 self.date = date or '0 0'
47 self.date = date or '0 0'
48 self.desc = desc
48 self.desc = desc
49 self.parents = parents
49 self.parents = parents
50 self.branch = branch
50 self.branch = branch
51 self.rev = rev
51 self.rev = rev
52 self.extra = extra
52 self.extra = extra
53 self.sortkey = sortkey
53 self.sortkey = sortkey
54
54
55 class converter_source(object):
55 class converter_source(object):
56 """Conversion source interface"""
56 """Conversion source interface"""
57
57
58 def __init__(self, ui, path=None, rev=None):
58 def __init__(self, ui, path=None, rev=None):
59 """Initialize conversion source (or raise NoRepo("message")
59 """Initialize conversion source (or raise NoRepo("message")
60 exception if path is not a valid repository)"""
60 exception if path is not a valid repository)"""
61 self.ui = ui
61 self.ui = ui
62 self.path = path
62 self.path = path
63 self.rev = rev
63 self.rev = rev
64
64
65 self.encoding = 'utf-8'
65 self.encoding = 'utf-8'
66
66
67 def before(self):
67 def before(self):
68 pass
68 pass
69
69
70 def after(self):
70 def after(self):
71 pass
71 pass
72
72
73 def setrevmap(self, revmap):
73 def setrevmap(self, revmap):
74 """set the map of already-converted revisions"""
74 """set the map of already-converted revisions"""
75 pass
75 pass
76
76
77 def getheads(self):
77 def getheads(self):
78 """Return a list of this repository's heads"""
78 """Return a list of this repository's heads"""
79 raise NotImplementedError
79 raise NotImplementedError
80
80
81 def getfile(self, name, rev):
81 def getfile(self, name, rev):
82 """Return a pair (data, mode) where data is the file content
82 """Return a pair (data, mode) where data is the file content
83 as a string and mode one of '', 'x' or 'l'. rev is the
83 as a string and mode one of '', 'x' or 'l'. rev is the
84 identifier returned by a previous call to getchanges(). Raise
84 identifier returned by a previous call to getchanges(). Raise
85 IOError to indicate that name was deleted in rev.
85 IOError to indicate that name was deleted in rev.
86 """
86 """
87 raise NotImplementedError
87 raise NotImplementedError
88
88
89 def getchanges(self, version):
89 def getchanges(self, version):
90 """Returns a tuple of (files, copies).
90 """Returns a tuple of (files, copies).
91
91
92 files is a sorted list of (filename, id) tuples for all files
92 files is a sorted list of (filename, id) tuples for all files
93 changed between version and its first parent returned by
93 changed between version and its first parent returned by
94 getcommit(). id is the source revision id of the file.
94 getcommit(). id is the source revision id of the file.
95
95
96 copies is a dictionary of dest: source
96 copies is a dictionary of dest: source
97 """
97 """
98 raise NotImplementedError
98 raise NotImplementedError
99
99
100 def getcommit(self, version):
100 def getcommit(self, version):
101 """Return the commit object for version"""
101 """Return the commit object for version"""
102 raise NotImplementedError
102 raise NotImplementedError
103
103
104 def gettags(self):
104 def gettags(self):
105 """Return the tags as a dictionary of name: revision
105 """Return the tags as a dictionary of name: revision
106
106
107 Tag names must be UTF-8 strings.
107 Tag names must be UTF-8 strings.
108 """
108 """
109 raise NotImplementedError
109 raise NotImplementedError
110
110
111 def recode(self, s, encoding=None):
111 def recode(self, s, encoding=None):
112 if not encoding:
112 if not encoding:
113 encoding = self.encoding or 'utf-8'
113 encoding = self.encoding or 'utf-8'
114
114
115 if isinstance(s, unicode):
115 if isinstance(s, unicode):
116 return s.encode("utf-8")
116 return s.encode("utf-8")
117 try:
117 try:
118 return s.decode(encoding).encode("utf-8")
118 return s.decode(encoding).encode("utf-8")
119 except:
119 except UnicodeError:
120 try:
120 try:
121 return s.decode("latin-1").encode("utf-8")
121 return s.decode("latin-1").encode("utf-8")
122 except:
122 except UnicodeError:
123 return s.decode(encoding, "replace").encode("utf-8")
123 return s.decode(encoding, "replace").encode("utf-8")
124
124
125 def getchangedfiles(self, rev, i):
125 def getchangedfiles(self, rev, i):
126 """Return the files changed by rev compared to parent[i].
126 """Return the files changed by rev compared to parent[i].
127
127
128 i is an index selecting one of the parents of rev. The return
128 i is an index selecting one of the parents of rev. The return
129 value should be the list of files that are different in rev and
129 value should be the list of files that are different in rev and
130 this parent.
130 this parent.
131
131
132 If rev has no parents, i is None.
132 If rev has no parents, i is None.
133
133
134 This function is only needed to support --filemap
134 This function is only needed to support --filemap
135 """
135 """
136 raise NotImplementedError
136 raise NotImplementedError
137
137
138 def converted(self, rev, sinkrev):
138 def converted(self, rev, sinkrev):
139 '''Notify the source that a revision has been converted.'''
139 '''Notify the source that a revision has been converted.'''
140 pass
140 pass
141
141
142 def hasnativeorder(self):
142 def hasnativeorder(self):
143 """Return true if this source has a meaningful, native revision
143 """Return true if this source has a meaningful, native revision
144 order. For instance, Mercurial revisions are store sequentially
144 order. For instance, Mercurial revisions are store sequentially
145 while there is no such global ordering with Darcs.
145 while there is no such global ordering with Darcs.
146 """
146 """
147 return False
147 return False
148
148
149 def lookuprev(self, rev):
149 def lookuprev(self, rev):
150 """If rev is a meaningful revision reference in source, return
150 """If rev is a meaningful revision reference in source, return
151 the referenced identifier in the same format used by getcommit().
151 the referenced identifier in the same format used by getcommit().
152 return None otherwise.
152 return None otherwise.
153 """
153 """
154 return None
154 return None
155
155
156 def getbookmarks(self):
156 def getbookmarks(self):
157 """Return the bookmarks as a dictionary of name: revision
157 """Return the bookmarks as a dictionary of name: revision
158
158
159 Bookmark names are to be UTF-8 strings.
159 Bookmark names are to be UTF-8 strings.
160 """
160 """
161 return {}
161 return {}
162
162
163 class converter_sink(object):
163 class converter_sink(object):
164 """Conversion sink (target) interface"""
164 """Conversion sink (target) interface"""
165
165
166 def __init__(self, ui, path):
166 def __init__(self, ui, path):
167 """Initialize conversion sink (or raise NoRepo("message")
167 """Initialize conversion sink (or raise NoRepo("message")
168 exception if path is not a valid repository)
168 exception if path is not a valid repository)
169
169
170 created is a list of paths to remove if a fatal error occurs
170 created is a list of paths to remove if a fatal error occurs
171 later"""
171 later"""
172 self.ui = ui
172 self.ui = ui
173 self.path = path
173 self.path = path
174 self.created = []
174 self.created = []
175
175
176 def getheads(self):
176 def getheads(self):
177 """Return a list of this repository's heads"""
177 """Return a list of this repository's heads"""
178 raise NotImplementedError
178 raise NotImplementedError
179
179
180 def revmapfile(self):
180 def revmapfile(self):
181 """Path to a file that will contain lines
181 """Path to a file that will contain lines
182 source_rev_id sink_rev_id
182 source_rev_id sink_rev_id
183 mapping equivalent revision identifiers for each system."""
183 mapping equivalent revision identifiers for each system."""
184 raise NotImplementedError
184 raise NotImplementedError
185
185
186 def authorfile(self):
186 def authorfile(self):
187 """Path to a file that will contain lines
187 """Path to a file that will contain lines
188 srcauthor=dstauthor
188 srcauthor=dstauthor
189 mapping equivalent authors identifiers for each system."""
189 mapping equivalent authors identifiers for each system."""
190 return None
190 return None
191
191
192 def putcommit(self, files, copies, parents, commit, source, revmap):
192 def putcommit(self, files, copies, parents, commit, source, revmap):
193 """Create a revision with all changed files listed in 'files'
193 """Create a revision with all changed files listed in 'files'
194 and having listed parents. 'commit' is a commit object
194 and having listed parents. 'commit' is a commit object
195 containing at a minimum the author, date, and message for this
195 containing at a minimum the author, date, and message for this
196 changeset. 'files' is a list of (path, version) tuples,
196 changeset. 'files' is a list of (path, version) tuples,
197 'copies' is a dictionary mapping destinations to sources,
197 'copies' is a dictionary mapping destinations to sources,
198 'source' is the source repository, and 'revmap' is a mapfile
198 'source' is the source repository, and 'revmap' is a mapfile
199 of source revisions to converted revisions. Only getfile() and
199 of source revisions to converted revisions. Only getfile() and
200 lookuprev() should be called on 'source'.
200 lookuprev() should be called on 'source'.
201
201
202 Note that the sink repository is not told to update itself to
202 Note that the sink repository is not told to update itself to
203 a particular revision (or even what that revision would be)
203 a particular revision (or even what that revision would be)
204 before it receives the file data.
204 before it receives the file data.
205 """
205 """
206 raise NotImplementedError
206 raise NotImplementedError
207
207
208 def puttags(self, tags):
208 def puttags(self, tags):
209 """Put tags into sink.
209 """Put tags into sink.
210
210
211 tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
211 tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
212 Return a pair (tag_revision, tag_parent_revision), or (None, None)
212 Return a pair (tag_revision, tag_parent_revision), or (None, None)
213 if nothing was changed.
213 if nothing was changed.
214 """
214 """
215 raise NotImplementedError
215 raise NotImplementedError
216
216
217 def setbranch(self, branch, pbranches):
217 def setbranch(self, branch, pbranches):
218 """Set the current branch name. Called before the first putcommit
218 """Set the current branch name. Called before the first putcommit
219 on the branch.
219 on the branch.
220 branch: branch name for subsequent commits
220 branch: branch name for subsequent commits
221 pbranches: (converted parent revision, parent branch) tuples"""
221 pbranches: (converted parent revision, parent branch) tuples"""
222 pass
222 pass
223
223
224 def setfilemapmode(self, active):
224 def setfilemapmode(self, active):
225 """Tell the destination that we're using a filemap
225 """Tell the destination that we're using a filemap
226
226
227 Some converter_sources (svn in particular) can claim that a file
227 Some converter_sources (svn in particular) can claim that a file
228 was changed in a revision, even if there was no change. This method
228 was changed in a revision, even if there was no change. This method
229 tells the destination that we're using a filemap and that it should
229 tells the destination that we're using a filemap and that it should
230 filter empty revisions.
230 filter empty revisions.
231 """
231 """
232 pass
232 pass
233
233
234 def before(self):
234 def before(self):
235 pass
235 pass
236
236
237 def after(self):
237 def after(self):
238 pass
238 pass
239
239
240 def putbookmarks(self, bookmarks):
240 def putbookmarks(self, bookmarks):
241 """Put bookmarks into sink.
241 """Put bookmarks into sink.
242
242
243 bookmarks: {bookmarkname: sink_rev_id, ...}
243 bookmarks: {bookmarkname: sink_rev_id, ...}
244 where bookmarkname is an UTF-8 string.
244 where bookmarkname is an UTF-8 string.
245 """
245 """
246 pass
246 pass
247
247
248 def hascommit(self, rev):
248 def hascommit(self, rev):
249 """Return True if the sink contains rev"""
249 """Return True if the sink contains rev"""
250 raise NotImplementedError
250 raise NotImplementedError
251
251
252 class commandline(object):
252 class commandline(object):
253 def __init__(self, ui, command):
253 def __init__(self, ui, command):
254 self.ui = ui
254 self.ui = ui
255 self.command = command
255 self.command = command
256
256
257 def prerun(self):
257 def prerun(self):
258 pass
258 pass
259
259
260 def postrun(self):
260 def postrun(self):
261 pass
261 pass
262
262
263 def _cmdline(self, cmd, closestdin, *args, **kwargs):
263 def _cmdline(self, cmd, closestdin, *args, **kwargs):
264 cmdline = [self.command, cmd] + list(args)
264 cmdline = [self.command, cmd] + list(args)
265 for k, v in kwargs.iteritems():
265 for k, v in kwargs.iteritems():
266 if len(k) == 1:
266 if len(k) == 1:
267 cmdline.append('-' + k)
267 cmdline.append('-' + k)
268 else:
268 else:
269 cmdline.append('--' + k.replace('_', '-'))
269 cmdline.append('--' + k.replace('_', '-'))
270 try:
270 try:
271 if len(k) == 1:
271 if len(k) == 1:
272 cmdline.append('' + v)
272 cmdline.append('' + v)
273 else:
273 else:
274 cmdline[-1] += '=' + v
274 cmdline[-1] += '=' + v
275 except TypeError:
275 except TypeError:
276 pass
276 pass
277 cmdline = [util.shellquote(arg) for arg in cmdline]
277 cmdline = [util.shellquote(arg) for arg in cmdline]
278 if not self.ui.debugflag:
278 if not self.ui.debugflag:
279 cmdline += ['2>', util.nulldev]
279 cmdline += ['2>', util.nulldev]
280 if closestdin:
280 if closestdin:
281 cmdline += ['<', util.nulldev]
281 cmdline += ['<', util.nulldev]
282 cmdline = ' '.join(cmdline)
282 cmdline = ' '.join(cmdline)
283 return cmdline
283 return cmdline
284
284
285 def _run(self, cmd, *args, **kwargs):
285 def _run(self, cmd, *args, **kwargs):
286 return self._dorun(util.popen, cmd, True, *args, **kwargs)
286 return self._dorun(util.popen, cmd, True, *args, **kwargs)
287
287
288 def _run2(self, cmd, *args, **kwargs):
288 def _run2(self, cmd, *args, **kwargs):
289 return self._dorun(util.popen2, cmd, False, *args, **kwargs)
289 return self._dorun(util.popen2, cmd, False, *args, **kwargs)
290
290
291 def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs):
291 def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs):
292 cmdline = self._cmdline(cmd, closestdin, *args, **kwargs)
292 cmdline = self._cmdline(cmd, closestdin, *args, **kwargs)
293 self.ui.debug('running: %s\n' % (cmdline,))
293 self.ui.debug('running: %s\n' % (cmdline,))
294 self.prerun()
294 self.prerun()
295 try:
295 try:
296 return openfunc(cmdline)
296 return openfunc(cmdline)
297 finally:
297 finally:
298 self.postrun()
298 self.postrun()
299
299
300 def run(self, cmd, *args, **kwargs):
300 def run(self, cmd, *args, **kwargs):
301 fp = self._run(cmd, *args, **kwargs)
301 fp = self._run(cmd, *args, **kwargs)
302 output = fp.read()
302 output = fp.read()
303 self.ui.debug(output)
303 self.ui.debug(output)
304 return output, fp.close()
304 return output, fp.close()
305
305
306 def runlines(self, cmd, *args, **kwargs):
306 def runlines(self, cmd, *args, **kwargs):
307 fp = self._run(cmd, *args, **kwargs)
307 fp = self._run(cmd, *args, **kwargs)
308 output = fp.readlines()
308 output = fp.readlines()
309 self.ui.debug(''.join(output))
309 self.ui.debug(''.join(output))
310 return output, fp.close()
310 return output, fp.close()
311
311
312 def checkexit(self, status, output=''):
312 def checkexit(self, status, output=''):
313 if status:
313 if status:
314 if output:
314 if output:
315 self.ui.warn(_('%s error:\n') % self.command)
315 self.ui.warn(_('%s error:\n') % self.command)
316 self.ui.warn(output)
316 self.ui.warn(output)
317 msg = util.explainexit(status)[0]
317 msg = util.explainexit(status)[0]
318 raise util.Abort('%s %s' % (self.command, msg))
318 raise util.Abort('%s %s' % (self.command, msg))
319
319
320 def run0(self, cmd, *args, **kwargs):
320 def run0(self, cmd, *args, **kwargs):
321 output, status = self.run(cmd, *args, **kwargs)
321 output, status = self.run(cmd, *args, **kwargs)
322 self.checkexit(status, output)
322 self.checkexit(status, output)
323 return output
323 return output
324
324
325 def runlines0(self, cmd, *args, **kwargs):
325 def runlines0(self, cmd, *args, **kwargs):
326 output, status = self.runlines(cmd, *args, **kwargs)
326 output, status = self.runlines(cmd, *args, **kwargs)
327 self.checkexit(status, ''.join(output))
327 self.checkexit(status, ''.join(output))
328 return output
328 return output
329
329
330 @propertycache
330 @propertycache
331 def argmax(self):
331 def argmax(self):
332 # POSIX requires at least 4096 bytes for ARG_MAX
332 # POSIX requires at least 4096 bytes for ARG_MAX
333 argmax = 4096
333 argmax = 4096
334 try:
334 try:
335 argmax = os.sysconf("SC_ARG_MAX")
335 argmax = os.sysconf("SC_ARG_MAX")
336 except:
336 except (AttributeError, ValueError):
337 pass
337 pass
338
338
339 # Windows shells impose their own limits on command line length,
339 # Windows shells impose their own limits on command line length,
340 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
340 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
341 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
341 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
342 # details about cmd.exe limitations.
342 # details about cmd.exe limitations.
343
343
344 # Since ARG_MAX is for command line _and_ environment, lower our limit
344 # Since ARG_MAX is for command line _and_ environment, lower our limit
345 # (and make happy Windows shells while doing this).
345 # (and make happy Windows shells while doing this).
346 return argmax // 2 - 1
346 return argmax // 2 - 1
347
347
348 def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs):
348 def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs):
349 cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs))
349 cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs))
350 limit = self.argmax - cmdlen
350 limit = self.argmax - cmdlen
351 bytes = 0
351 bytes = 0
352 fl = []
352 fl = []
353 for fn in arglist:
353 for fn in arglist:
354 b = len(fn) + 3
354 b = len(fn) + 3
355 if bytes + b < limit or len(fl) == 0:
355 if bytes + b < limit or len(fl) == 0:
356 fl.append(fn)
356 fl.append(fn)
357 bytes += b
357 bytes += b
358 else:
358 else:
359 yield fl
359 yield fl
360 fl = [fn]
360 fl = [fn]
361 bytes = b
361 bytes = b
362 if fl:
362 if fl:
363 yield fl
363 yield fl
364
364
365 def xargs(self, arglist, cmd, *args, **kwargs):
365 def xargs(self, arglist, cmd, *args, **kwargs):
366 for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs):
366 for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs):
367 self.run0(cmd, *(list(args) + l), **kwargs)
367 self.run0(cmd, *(list(args) + l), **kwargs)
368
368
369 class mapfile(dict):
369 class mapfile(dict):
370 def __init__(self, ui, path):
370 def __init__(self, ui, path):
371 super(mapfile, self).__init__()
371 super(mapfile, self).__init__()
372 self.ui = ui
372 self.ui = ui
373 self.path = path
373 self.path = path
374 self.fp = None
374 self.fp = None
375 self.order = []
375 self.order = []
376 self._read()
376 self._read()
377
377
378 def _read(self):
378 def _read(self):
379 if not self.path:
379 if not self.path:
380 return
380 return
381 try:
381 try:
382 fp = open(self.path, 'r')
382 fp = open(self.path, 'r')
383 except IOError, err:
383 except IOError, err:
384 if err.errno != errno.ENOENT:
384 if err.errno != errno.ENOENT:
385 raise
385 raise
386 return
386 return
387 for i, line in enumerate(fp):
387 for i, line in enumerate(fp):
388 line = line.splitlines()[0].rstrip()
388 line = line.splitlines()[0].rstrip()
389 if not line:
389 if not line:
390 # Ignore blank lines
390 # Ignore blank lines
391 continue
391 continue
392 try:
392 try:
393 key, value = line.rsplit(' ', 1)
393 key, value = line.rsplit(' ', 1)
394 except ValueError:
394 except ValueError:
395 raise util.Abort(
395 raise util.Abort(
396 _('syntax error in %s(%d): key/value pair expected')
396 _('syntax error in %s(%d): key/value pair expected')
397 % (self.path, i + 1))
397 % (self.path, i + 1))
398 if key not in self:
398 if key not in self:
399 self.order.append(key)
399 self.order.append(key)
400 super(mapfile, self).__setitem__(key, value)
400 super(mapfile, self).__setitem__(key, value)
401 fp.close()
401 fp.close()
402
402
403 def __setitem__(self, key, value):
403 def __setitem__(self, key, value):
404 if self.fp is None:
404 if self.fp is None:
405 try:
405 try:
406 self.fp = open(self.path, 'a')
406 self.fp = open(self.path, 'a')
407 except IOError, err:
407 except IOError, err:
408 raise util.Abort(_('could not open map file %r: %s') %
408 raise util.Abort(_('could not open map file %r: %s') %
409 (self.path, err.strerror))
409 (self.path, err.strerror))
410 self.fp.write('%s %s\n' % (key, value))
410 self.fp.write('%s %s\n' % (key, value))
411 self.fp.flush()
411 self.fp.flush()
412 super(mapfile, self).__setitem__(key, value)
412 super(mapfile, self).__setitem__(key, value)
413
413
414 def close(self):
414 def close(self):
415 if self.fp:
415 if self.fp:
416 self.fp.close()
416 self.fp.close()
417 self.fp = None
417 self.fp = None
418
418
419 def parsesplicemap(path):
419 def parsesplicemap(path):
420 """Parse a splicemap, return a child/parents dictionary."""
420 """Parse a splicemap, return a child/parents dictionary."""
421 if not path:
421 if not path:
422 return {}
422 return {}
423 m = {}
423 m = {}
424 try:
424 try:
425 fp = open(path, 'r')
425 fp = open(path, 'r')
426 for i, line in enumerate(fp):
426 for i, line in enumerate(fp):
427 line = line.splitlines()[0].rstrip()
427 line = line.splitlines()[0].rstrip()
428 if not line:
428 if not line:
429 # Ignore blank lines
429 # Ignore blank lines
430 continue
430 continue
431 try:
431 try:
432 child, parents = line.split(' ', 1)
432 child, parents = line.split(' ', 1)
433 parents = parents.replace(',', ' ').split()
433 parents = parents.replace(',', ' ').split()
434 except ValueError:
434 except ValueError:
435 raise util.Abort(_('syntax error in %s(%d): child parent1'
435 raise util.Abort(_('syntax error in %s(%d): child parent1'
436 '[,parent2] expected') % (path, i + 1))
436 '[,parent2] expected') % (path, i + 1))
437 pp = []
437 pp = []
438 for p in parents:
438 for p in parents:
439 if p not in pp:
439 if p not in pp:
440 pp.append(p)
440 pp.append(p)
441 m[child] = pp
441 m[child] = pp
442 except IOError, e:
442 except IOError, e:
443 if e.errno != errno.ENOENT:
443 if e.errno != errno.ENOENT:
444 raise
444 raise
445 return m
445 return m
@@ -1,853 +1,853 b''
1 # Mercurial built-in replacement for cvsps.
1 # Mercurial built-in replacement for cvsps.
2 #
2 #
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 import re
9 import re
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import hook
13 from mercurial import hook
14 from mercurial import util
14 from mercurial import util
15
15
16 class logentry(object):
16 class logentry(object):
17 '''Class logentry has the following attributes:
17 '''Class logentry has the following attributes:
18 .author - author name as CVS knows it
18 .author - author name as CVS knows it
19 .branch - name of branch this revision is on
19 .branch - name of branch this revision is on
20 .branches - revision tuple of branches starting at this revision
20 .branches - revision tuple of branches starting at this revision
21 .comment - commit message
21 .comment - commit message
22 .date - the commit date as a (time, tz) tuple
22 .date - the commit date as a (time, tz) tuple
23 .dead - true if file revision is dead
23 .dead - true if file revision is dead
24 .file - Name of file
24 .file - Name of file
25 .lines - a tuple (+lines, -lines) or None
25 .lines - a tuple (+lines, -lines) or None
26 .parent - Previous revision of this entry
26 .parent - Previous revision of this entry
27 .rcs - name of file as returned from CVS
27 .rcs - name of file as returned from CVS
28 .revision - revision number as tuple
28 .revision - revision number as tuple
29 .tags - list of tags on the file
29 .tags - list of tags on the file
30 .synthetic - is this a synthetic "file ... added on ..." revision?
30 .synthetic - is this a synthetic "file ... added on ..." revision?
31 .mergepoint- the branch that has been merged from
31 .mergepoint- the branch that has been merged from
32 (if present in rlog output)
32 (if present in rlog output)
33 .branchpoints- the branches that start at the current entry
33 .branchpoints- the branches that start at the current entry
34 '''
34 '''
35 def __init__(self, **entries):
35 def __init__(self, **entries):
36 self.synthetic = False
36 self.synthetic = False
37 self.__dict__.update(entries)
37 self.__dict__.update(entries)
38
38
39 def __repr__(self):
39 def __repr__(self):
40 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
40 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
41 id(self),
41 id(self),
42 self.file,
42 self.file,
43 ".".join(map(str, self.revision)))
43 ".".join(map(str, self.revision)))
44
44
45 class logerror(Exception):
45 class logerror(Exception):
46 pass
46 pass
47
47
48 def getrepopath(cvspath):
48 def getrepopath(cvspath):
49 """Return the repository path from a CVS path.
49 """Return the repository path from a CVS path.
50
50
51 >>> getrepopath('/foo/bar')
51 >>> getrepopath('/foo/bar')
52 '/foo/bar'
52 '/foo/bar'
53 >>> getrepopath('c:/foo/bar')
53 >>> getrepopath('c:/foo/bar')
54 'c:/foo/bar'
54 'c:/foo/bar'
55 >>> getrepopath(':pserver:10/foo/bar')
55 >>> getrepopath(':pserver:10/foo/bar')
56 '/foo/bar'
56 '/foo/bar'
57 >>> getrepopath(':pserver:10c:/foo/bar')
57 >>> getrepopath(':pserver:10c:/foo/bar')
58 '/foo/bar'
58 '/foo/bar'
59 >>> getrepopath(':pserver:/foo/bar')
59 >>> getrepopath(':pserver:/foo/bar')
60 '/foo/bar'
60 '/foo/bar'
61 >>> getrepopath(':pserver:c:/foo/bar')
61 >>> getrepopath(':pserver:c:/foo/bar')
62 'c:/foo/bar'
62 'c:/foo/bar'
63 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
63 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
64 '/foo/bar'
64 '/foo/bar'
65 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
65 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
66 'c:/foo/bar'
66 'c:/foo/bar'
67 """
67 """
68 # According to CVS manual, CVS paths are expressed like:
68 # According to CVS manual, CVS paths are expressed like:
69 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
69 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
70 #
70 #
71 # Unfortunately, Windows absolute paths start with a drive letter
71 # Unfortunately, Windows absolute paths start with a drive letter
72 # like 'c:' making it harder to parse. Here we assume that drive
72 # like 'c:' making it harder to parse. Here we assume that drive
73 # letters are only one character long and any CVS component before
73 # letters are only one character long and any CVS component before
74 # the repository path is at least 2 characters long, and use this
74 # the repository path is at least 2 characters long, and use this
75 # to disambiguate.
75 # to disambiguate.
76 parts = cvspath.split(':')
76 parts = cvspath.split(':')
77 if len(parts) == 1:
77 if len(parts) == 1:
78 return parts[0]
78 return parts[0]
79 # Here there is an ambiguous case if we have a port number
79 # Here there is an ambiguous case if we have a port number
80 # immediately followed by a Windows driver letter. We assume this
80 # immediately followed by a Windows driver letter. We assume this
81 # never happens and decide it must be CVS path component,
81 # never happens and decide it must be CVS path component,
82 # therefore ignoring it.
82 # therefore ignoring it.
83 if len(parts[-2]) > 1:
83 if len(parts[-2]) > 1:
84 return parts[-1].lstrip('0123456789')
84 return parts[-1].lstrip('0123456789')
85 return parts[-2] + ':' + parts[-1]
85 return parts[-2] + ':' + parts[-1]
86
86
87 def createlog(ui, directory=None, root="", rlog=True, cache=None):
87 def createlog(ui, directory=None, root="", rlog=True, cache=None):
88 '''Collect the CVS rlog'''
88 '''Collect the CVS rlog'''
89
89
90 # Because we store many duplicate commit log messages, reusing strings
90 # Because we store many duplicate commit log messages, reusing strings
91 # saves a lot of memory and pickle storage space.
91 # saves a lot of memory and pickle storage space.
92 _scache = {}
92 _scache = {}
93 def scache(s):
93 def scache(s):
94 "return a shared version of a string"
94 "return a shared version of a string"
95 return _scache.setdefault(s, s)
95 return _scache.setdefault(s, s)
96
96
97 ui.status(_('collecting CVS rlog\n'))
97 ui.status(_('collecting CVS rlog\n'))
98
98
99 log = [] # list of logentry objects containing the CVS state
99 log = [] # list of logentry objects containing the CVS state
100
100
101 # patterns to match in CVS (r)log output, by state of use
101 # patterns to match in CVS (r)log output, by state of use
102 re_00 = re.compile('RCS file: (.+)$')
102 re_00 = re.compile('RCS file: (.+)$')
103 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
103 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
104 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
104 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
105 re_03 = re.compile("(Cannot access.+CVSROOT)|"
105 re_03 = re.compile("(Cannot access.+CVSROOT)|"
106 "(can't create temporary directory.+)$")
106 "(can't create temporary directory.+)$")
107 re_10 = re.compile('Working file: (.+)$')
107 re_10 = re.compile('Working file: (.+)$')
108 re_20 = re.compile('symbolic names:')
108 re_20 = re.compile('symbolic names:')
109 re_30 = re.compile('\t(.+): ([\\d.]+)$')
109 re_30 = re.compile('\t(.+): ([\\d.]+)$')
110 re_31 = re.compile('----------------------------$')
110 re_31 = re.compile('----------------------------$')
111 re_32 = re.compile('======================================='
111 re_32 = re.compile('======================================='
112 '======================================$')
112 '======================================$')
113 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
113 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
114 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
114 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
115 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
115 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
116 r'(.*mergepoint:\s+([^;]+);)?')
116 r'(.*mergepoint:\s+([^;]+);)?')
117 re_70 = re.compile('branches: (.+);$')
117 re_70 = re.compile('branches: (.+);$')
118
118
119 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
119 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
120
120
121 prefix = '' # leading path to strip of what we get from CVS
121 prefix = '' # leading path to strip of what we get from CVS
122
122
123 if directory is None:
123 if directory is None:
124 # Current working directory
124 # Current working directory
125
125
126 # Get the real directory in the repository
126 # Get the real directory in the repository
127 try:
127 try:
128 prefix = open(os.path.join('CVS','Repository')).read().strip()
128 prefix = open(os.path.join('CVS','Repository')).read().strip()
129 directory = prefix
129 directory = prefix
130 if prefix == ".":
130 if prefix == ".":
131 prefix = ""
131 prefix = ""
132 except IOError:
132 except IOError:
133 raise logerror(_('not a CVS sandbox'))
133 raise logerror(_('not a CVS sandbox'))
134
134
135 if prefix and not prefix.endswith(os.sep):
135 if prefix and not prefix.endswith(os.sep):
136 prefix += os.sep
136 prefix += os.sep
137
137
138 # Use the Root file in the sandbox, if it exists
138 # Use the Root file in the sandbox, if it exists
139 try:
139 try:
140 root = open(os.path.join('CVS','Root')).read().strip()
140 root = open(os.path.join('CVS','Root')).read().strip()
141 except IOError:
141 except IOError:
142 pass
142 pass
143
143
144 if not root:
144 if not root:
145 root = os.environ.get('CVSROOT', '')
145 root = os.environ.get('CVSROOT', '')
146
146
147 # read log cache if one exists
147 # read log cache if one exists
148 oldlog = []
148 oldlog = []
149 date = None
149 date = None
150
150
151 if cache:
151 if cache:
152 cachedir = os.path.expanduser('~/.hg.cvsps')
152 cachedir = os.path.expanduser('~/.hg.cvsps')
153 if not os.path.exists(cachedir):
153 if not os.path.exists(cachedir):
154 os.mkdir(cachedir)
154 os.mkdir(cachedir)
155
155
156 # The cvsps cache pickle needs a uniquified name, based on the
156 # The cvsps cache pickle needs a uniquified name, based on the
157 # repository location. The address may have all sort of nasties
157 # repository location. The address may have all sort of nasties
158 # in it, slashes, colons and such. So here we take just the
158 # in it, slashes, colons and such. So here we take just the
159 # alphanumerics, concatenated in a way that does not mix up the
159 # alphanumerics, concatenated in a way that does not mix up the
160 # various components, so that
160 # various components, so that
161 # :pserver:user@server:/path
161 # :pserver:user@server:/path
162 # and
162 # and
163 # /pserver/user/server/path
163 # /pserver/user/server/path
164 # are mapped to different cache file names.
164 # are mapped to different cache file names.
165 cachefile = root.split(":") + [directory, "cache"]
165 cachefile = root.split(":") + [directory, "cache"]
166 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
166 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
167 cachefile = os.path.join(cachedir,
167 cachefile = os.path.join(cachedir,
168 '.'.join([s for s in cachefile if s]))
168 '.'.join([s for s in cachefile if s]))
169
169
170 if cache == 'update':
170 if cache == 'update':
171 try:
171 try:
172 ui.note(_('reading cvs log cache %s\n') % cachefile)
172 ui.note(_('reading cvs log cache %s\n') % cachefile)
173 oldlog = pickle.load(open(cachefile))
173 oldlog = pickle.load(open(cachefile))
174 ui.note(_('cache has %d log entries\n') % len(oldlog))
174 ui.note(_('cache has %d log entries\n') % len(oldlog))
175 except Exception, e:
175 except Exception, e:
176 ui.note(_('error reading cache: %r\n') % e)
176 ui.note(_('error reading cache: %r\n') % e)
177
177
178 if oldlog:
178 if oldlog:
179 date = oldlog[-1].date # last commit date as a (time,tz) tuple
179 date = oldlog[-1].date # last commit date as a (time,tz) tuple
180 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
180 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
181
181
182 # build the CVS commandline
182 # build the CVS commandline
183 cmd = ['cvs', '-q']
183 cmd = ['cvs', '-q']
184 if root:
184 if root:
185 cmd.append('-d%s' % root)
185 cmd.append('-d%s' % root)
186 p = util.normpath(getrepopath(root))
186 p = util.normpath(getrepopath(root))
187 if not p.endswith('/'):
187 if not p.endswith('/'):
188 p += '/'
188 p += '/'
189 if prefix:
189 if prefix:
190 # looks like normpath replaces "" by "."
190 # looks like normpath replaces "" by "."
191 prefix = p + util.normpath(prefix)
191 prefix = p + util.normpath(prefix)
192 else:
192 else:
193 prefix = p
193 prefix = p
194 cmd.append(['log', 'rlog'][rlog])
194 cmd.append(['log', 'rlog'][rlog])
195 if date:
195 if date:
196 # no space between option and date string
196 # no space between option and date string
197 cmd.append('-d>%s' % date)
197 cmd.append('-d>%s' % date)
198 cmd.append(directory)
198 cmd.append(directory)
199
199
200 # state machine begins here
200 # state machine begins here
201 tags = {} # dictionary of revisions on current file with their tags
201 tags = {} # dictionary of revisions on current file with their tags
202 branchmap = {} # mapping between branch names and revision numbers
202 branchmap = {} # mapping between branch names and revision numbers
203 state = 0
203 state = 0
204 store = False # set when a new record can be appended
204 store = False # set when a new record can be appended
205
205
206 cmd = [util.shellquote(arg) for arg in cmd]
206 cmd = [util.shellquote(arg) for arg in cmd]
207 ui.note(_("running %s\n") % (' '.join(cmd)))
207 ui.note(_("running %s\n") % (' '.join(cmd)))
208 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
208 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
209
209
210 pfp = util.popen(' '.join(cmd))
210 pfp = util.popen(' '.join(cmd))
211 peek = pfp.readline()
211 peek = pfp.readline()
212 while True:
212 while True:
213 line = peek
213 line = peek
214 if line == '':
214 if line == '':
215 break
215 break
216 peek = pfp.readline()
216 peek = pfp.readline()
217 if line.endswith('\n'):
217 if line.endswith('\n'):
218 line = line[:-1]
218 line = line[:-1]
219 #ui.debug('state=%d line=%r\n' % (state, line))
219 #ui.debug('state=%d line=%r\n' % (state, line))
220
220
221 if state == 0:
221 if state == 0:
222 # initial state, consume input until we see 'RCS file'
222 # initial state, consume input until we see 'RCS file'
223 match = re_00.match(line)
223 match = re_00.match(line)
224 if match:
224 if match:
225 rcs = match.group(1)
225 rcs = match.group(1)
226 tags = {}
226 tags = {}
227 if rlog:
227 if rlog:
228 filename = util.normpath(rcs[:-2])
228 filename = util.normpath(rcs[:-2])
229 if filename.startswith(prefix):
229 if filename.startswith(prefix):
230 filename = filename[len(prefix):]
230 filename = filename[len(prefix):]
231 if filename.startswith('/'):
231 if filename.startswith('/'):
232 filename = filename[1:]
232 filename = filename[1:]
233 if filename.startswith('Attic/'):
233 if filename.startswith('Attic/'):
234 filename = filename[6:]
234 filename = filename[6:]
235 else:
235 else:
236 filename = filename.replace('/Attic/', '/')
236 filename = filename.replace('/Attic/', '/')
237 state = 2
237 state = 2
238 continue
238 continue
239 state = 1
239 state = 1
240 continue
240 continue
241 match = re_01.match(line)
241 match = re_01.match(line)
242 if match:
242 if match:
243 raise logerror(match.group(1))
243 raise logerror(match.group(1))
244 match = re_02.match(line)
244 match = re_02.match(line)
245 if match:
245 if match:
246 raise logerror(match.group(2))
246 raise logerror(match.group(2))
247 if re_03.match(line):
247 if re_03.match(line):
248 raise logerror(line)
248 raise logerror(line)
249
249
250 elif state == 1:
250 elif state == 1:
251 # expect 'Working file' (only when using log instead of rlog)
251 # expect 'Working file' (only when using log instead of rlog)
252 match = re_10.match(line)
252 match = re_10.match(line)
253 assert match, _('RCS file must be followed by working file')
253 assert match, _('RCS file must be followed by working file')
254 filename = util.normpath(match.group(1))
254 filename = util.normpath(match.group(1))
255 state = 2
255 state = 2
256
256
257 elif state == 2:
257 elif state == 2:
258 # expect 'symbolic names'
258 # expect 'symbolic names'
259 if re_20.match(line):
259 if re_20.match(line):
260 branchmap = {}
260 branchmap = {}
261 state = 3
261 state = 3
262
262
263 elif state == 3:
263 elif state == 3:
264 # read the symbolic names and store as tags
264 # read the symbolic names and store as tags
265 match = re_30.match(line)
265 match = re_30.match(line)
266 if match:
266 if match:
267 rev = [int(x) for x in match.group(2).split('.')]
267 rev = [int(x) for x in match.group(2).split('.')]
268
268
269 # Convert magic branch number to an odd-numbered one
269 # Convert magic branch number to an odd-numbered one
270 revn = len(rev)
270 revn = len(rev)
271 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
271 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
272 rev = rev[:-2] + rev[-1:]
272 rev = rev[:-2] + rev[-1:]
273 rev = tuple(rev)
273 rev = tuple(rev)
274
274
275 if rev not in tags:
275 if rev not in tags:
276 tags[rev] = []
276 tags[rev] = []
277 tags[rev].append(match.group(1))
277 tags[rev].append(match.group(1))
278 branchmap[match.group(1)] = match.group(2)
278 branchmap[match.group(1)] = match.group(2)
279
279
280 elif re_31.match(line):
280 elif re_31.match(line):
281 state = 5
281 state = 5
282 elif re_32.match(line):
282 elif re_32.match(line):
283 state = 0
283 state = 0
284
284
285 elif state == 4:
285 elif state == 4:
286 # expecting '------' separator before first revision
286 # expecting '------' separator before first revision
287 if re_31.match(line):
287 if re_31.match(line):
288 state = 5
288 state = 5
289 else:
289 else:
290 assert not re_32.match(line), _('must have at least '
290 assert not re_32.match(line), _('must have at least '
291 'some revisions')
291 'some revisions')
292
292
293 elif state == 5:
293 elif state == 5:
294 # expecting revision number and possibly (ignored) lock indication
294 # expecting revision number and possibly (ignored) lock indication
295 # we create the logentry here from values stored in states 0 to 4,
295 # we create the logentry here from values stored in states 0 to 4,
296 # as this state is re-entered for subsequent revisions of a file.
296 # as this state is re-entered for subsequent revisions of a file.
297 match = re_50.match(line)
297 match = re_50.match(line)
298 assert match, _('expected revision number')
298 assert match, _('expected revision number')
299 e = logentry(rcs=scache(rcs), file=scache(filename),
299 e = logentry(rcs=scache(rcs), file=scache(filename),
300 revision=tuple([int(x) for x in match.group(1).split('.')]),
300 revision=tuple([int(x) for x in match.group(1).split('.')]),
301 branches=[], parent=None)
301 branches=[], parent=None)
302 state = 6
302 state = 6
303
303
304 elif state == 6:
304 elif state == 6:
305 # expecting date, author, state, lines changed
305 # expecting date, author, state, lines changed
306 match = re_60.match(line)
306 match = re_60.match(line)
307 assert match, _('revision must be followed by date line')
307 assert match, _('revision must be followed by date line')
308 d = match.group(1)
308 d = match.group(1)
309 if d[2] == '/':
309 if d[2] == '/':
310 # Y2K
310 # Y2K
311 d = '19' + d
311 d = '19' + d
312
312
313 if len(d.split()) != 3:
313 if len(d.split()) != 3:
314 # cvs log dates always in GMT
314 # cvs log dates always in GMT
315 d = d + ' UTC'
315 d = d + ' UTC'
316 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
316 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
317 '%Y/%m/%d %H:%M:%S',
317 '%Y/%m/%d %H:%M:%S',
318 '%Y-%m-%d %H:%M:%S'])
318 '%Y-%m-%d %H:%M:%S'])
319 e.author = scache(match.group(2))
319 e.author = scache(match.group(2))
320 e.dead = match.group(3).lower() == 'dead'
320 e.dead = match.group(3).lower() == 'dead'
321
321
322 if match.group(5):
322 if match.group(5):
323 if match.group(6):
323 if match.group(6):
324 e.lines = (int(match.group(5)), int(match.group(6)))
324 e.lines = (int(match.group(5)), int(match.group(6)))
325 else:
325 else:
326 e.lines = (int(match.group(5)), 0)
326 e.lines = (int(match.group(5)), 0)
327 elif match.group(6):
327 elif match.group(6):
328 e.lines = (0, int(match.group(6)))
328 e.lines = (0, int(match.group(6)))
329 else:
329 else:
330 e.lines = None
330 e.lines = None
331
331
332 if match.group(7): # cvsnt mergepoint
332 if match.group(7): # cvsnt mergepoint
333 myrev = match.group(8).split('.')
333 myrev = match.group(8).split('.')
334 if len(myrev) == 2: # head
334 if len(myrev) == 2: # head
335 e.mergepoint = 'HEAD'
335 e.mergepoint = 'HEAD'
336 else:
336 else:
337 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
337 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
338 branches = [b for b in branchmap if branchmap[b] == myrev]
338 branches = [b for b in branchmap if branchmap[b] == myrev]
339 assert len(branches) == 1, ('unknown branch: %s'
339 assert len(branches) == 1, ('unknown branch: %s'
340 % e.mergepoint)
340 % e.mergepoint)
341 e.mergepoint = branches[0]
341 e.mergepoint = branches[0]
342 else:
342 else:
343 e.mergepoint = None
343 e.mergepoint = None
344 e.comment = []
344 e.comment = []
345 state = 7
345 state = 7
346
346
347 elif state == 7:
347 elif state == 7:
348 # read the revision numbers of branches that start at this revision
348 # read the revision numbers of branches that start at this revision
349 # or store the commit log message otherwise
349 # or store the commit log message otherwise
350 m = re_70.match(line)
350 m = re_70.match(line)
351 if m:
351 if m:
352 e.branches = [tuple([int(y) for y in x.strip().split('.')])
352 e.branches = [tuple([int(y) for y in x.strip().split('.')])
353 for x in m.group(1).split(';')]
353 for x in m.group(1).split(';')]
354 state = 8
354 state = 8
355 elif re_31.match(line) and re_50.match(peek):
355 elif re_31.match(line) and re_50.match(peek):
356 state = 5
356 state = 5
357 store = True
357 store = True
358 elif re_32.match(line):
358 elif re_32.match(line):
359 state = 0
359 state = 0
360 store = True
360 store = True
361 else:
361 else:
362 e.comment.append(line)
362 e.comment.append(line)
363
363
364 elif state == 8:
364 elif state == 8:
365 # store commit log message
365 # store commit log message
366 if re_31.match(line):
366 if re_31.match(line):
367 cpeek = peek
367 cpeek = peek
368 if cpeek.endswith('\n'):
368 if cpeek.endswith('\n'):
369 cpeek = cpeek[:-1]
369 cpeek = cpeek[:-1]
370 if re_50.match(cpeek):
370 if re_50.match(cpeek):
371 state = 5
371 state = 5
372 store = True
372 store = True
373 else:
373 else:
374 e.comment.append(line)
374 e.comment.append(line)
375 elif re_32.match(line):
375 elif re_32.match(line):
376 state = 0
376 state = 0
377 store = True
377 store = True
378 else:
378 else:
379 e.comment.append(line)
379 e.comment.append(line)
380
380
381 # When a file is added on a branch B1, CVS creates a synthetic
381 # When a file is added on a branch B1, CVS creates a synthetic
382 # dead trunk revision 1.1 so that the branch has a root.
382 # dead trunk revision 1.1 so that the branch has a root.
383 # Likewise, if you merge such a file to a later branch B2 (one
383 # Likewise, if you merge such a file to a later branch B2 (one
384 # that already existed when the file was added on B1), CVS
384 # that already existed when the file was added on B1), CVS
385 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
385 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
386 # these revisions now, but mark them synthetic so
386 # these revisions now, but mark them synthetic so
387 # createchangeset() can take care of them.
387 # createchangeset() can take care of them.
388 if (store and
388 if (store and
389 e.dead and
389 e.dead and
390 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
390 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
391 len(e.comment) == 1 and
391 len(e.comment) == 1 and
392 file_added_re.match(e.comment[0])):
392 file_added_re.match(e.comment[0])):
393 ui.debug('found synthetic revision in %s: %r\n'
393 ui.debug('found synthetic revision in %s: %r\n'
394 % (e.rcs, e.comment[0]))
394 % (e.rcs, e.comment[0]))
395 e.synthetic = True
395 e.synthetic = True
396
396
397 if store:
397 if store:
398 # clean up the results and save in the log.
398 # clean up the results and save in the log.
399 store = False
399 store = False
400 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
400 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
401 e.comment = scache('\n'.join(e.comment))
401 e.comment = scache('\n'.join(e.comment))
402
402
403 revn = len(e.revision)
403 revn = len(e.revision)
404 if revn > 3 and (revn % 2) == 0:
404 if revn > 3 and (revn % 2) == 0:
405 e.branch = tags.get(e.revision[:-1], [None])[0]
405 e.branch = tags.get(e.revision[:-1], [None])[0]
406 else:
406 else:
407 e.branch = None
407 e.branch = None
408
408
409 # find the branches starting from this revision
409 # find the branches starting from this revision
410 branchpoints = set()
410 branchpoints = set()
411 for branch, revision in branchmap.iteritems():
411 for branch, revision in branchmap.iteritems():
412 revparts = tuple([int(i) for i in revision.split('.')])
412 revparts = tuple([int(i) for i in revision.split('.')])
413 if len(revparts) < 2: # bad tags
413 if len(revparts) < 2: # bad tags
414 continue
414 continue
415 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
415 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
416 # normal branch
416 # normal branch
417 if revparts[:-2] == e.revision:
417 if revparts[:-2] == e.revision:
418 branchpoints.add(branch)
418 branchpoints.add(branch)
419 elif revparts == (1, 1, 1): # vendor branch
419 elif revparts == (1, 1, 1): # vendor branch
420 if revparts in e.branches:
420 if revparts in e.branches:
421 branchpoints.add(branch)
421 branchpoints.add(branch)
422 e.branchpoints = branchpoints
422 e.branchpoints = branchpoints
423
423
424 log.append(e)
424 log.append(e)
425
425
426 if len(log) % 100 == 0:
426 if len(log) % 100 == 0:
427 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
427 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
428
428
429 log.sort(key=lambda x: (x.rcs, x.revision))
429 log.sort(key=lambda x: (x.rcs, x.revision))
430
430
431 # find parent revisions of individual files
431 # find parent revisions of individual files
432 versions = {}
432 versions = {}
433 for e in log:
433 for e in log:
434 branch = e.revision[:-1]
434 branch = e.revision[:-1]
435 p = versions.get((e.rcs, branch), None)
435 p = versions.get((e.rcs, branch), None)
436 if p is None:
436 if p is None:
437 p = e.revision[:-2]
437 p = e.revision[:-2]
438 e.parent = p
438 e.parent = p
439 versions[(e.rcs, branch)] = e.revision
439 versions[(e.rcs, branch)] = e.revision
440
440
441 # update the log cache
441 # update the log cache
442 if cache:
442 if cache:
443 if log:
443 if log:
444 # join up the old and new logs
444 # join up the old and new logs
445 log.sort(key=lambda x: x.date)
445 log.sort(key=lambda x: x.date)
446
446
447 if oldlog and oldlog[-1].date >= log[0].date:
447 if oldlog and oldlog[-1].date >= log[0].date:
448 raise logerror(_('log cache overlaps with new log entries,'
448 raise logerror(_('log cache overlaps with new log entries,'
449 ' re-run without cache.'))
449 ' re-run without cache.'))
450
450
451 log = oldlog + log
451 log = oldlog + log
452
452
453 # write the new cachefile
453 # write the new cachefile
454 ui.note(_('writing cvs log cache %s\n') % cachefile)
454 ui.note(_('writing cvs log cache %s\n') % cachefile)
455 pickle.dump(log, open(cachefile, 'w'))
455 pickle.dump(log, open(cachefile, 'w'))
456 else:
456 else:
457 log = oldlog
457 log = oldlog
458
458
459 ui.status(_('%d log entries\n') % len(log))
459 ui.status(_('%d log entries\n') % len(log))
460
460
461 hook.hook(ui, None, "cvslog", True, log=log)
461 hook.hook(ui, None, "cvslog", True, log=log)
462
462
463 return log
463 return log
464
464
465
465
class changeset(object):
    '''A group of logentry objects forming one converted commit.

    Attributes (filled in by createchangeset and its callers):
      .id          - integer identifying this changeset (list index)
      .author      - author name as CVS knows it
      .branch      - name of branch this changeset is on, or None
      .comment     - commit message
      .date        - the commit date as a (time,tz) tuple
      .entries     - list of logentry objects in this changeset
      .parents     - list of one or two parent changesets
      .tags        - list of tags on this changeset
      .synthetic   - from synthetic revision "file ... added on branch ..."
      .mergepoint  - the branch that has been merged from
                     (if present in rlog output)
      .branchpoints - the branches that start at the current entry
    '''

    def __init__(self, **entries):
        # All attributes are optional at construction time; later passes
        # assign the remaining ones directly on the instance.
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        # Fall back to a placeholder before numbering has happened.
        ident = getattr(self, 'id', "(no id)")
        return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), ident)
489
489
def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
    '''Convert log (a list of logentry objects) into changesets.

    Consecutive log entries sharing the same comment, author and branch
    whose dates lie within ``fuzz`` seconds of each other are merged into
    one changeset.  ``mergefrom``/``mergeto`` are regular expressions
    matched against commit messages to synthesize merge parents; when
    None, the default ``{{mergefrombranch ...}}`` and
    ``{{mergetobranch ...}}`` markers are used.

    Returns the list of changeset objects, numbered starting at 1.
    '''

    ui.status(_('creating changesets\n'))

    # Merge changesets

    # Group candidates for the same changeset next to each other; date is
    # the tie-breaker so the fuzz window below scans in order.
    log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date))

    changesets = []
    files = set()
    c = None
    for i, e in enumerate(log):

        # Check if log entry belongs to the current changeset or not.

        # Since CVS is file centric, two different file revisions with
        # different branchpoints should be treated as belonging to two
        # different changesets (and the ordering is important and not
        # honoured by cvsps at this point).
        #
        # Consider the following case:
        # foo 1.1 branchpoints: [MYBRANCH]
        # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
        #
        # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
        # later version of foo may be in MYBRANCH2, so foo should be the
        # first changeset and bar the next and MYBRANCH and MYBRANCH2
        # should both start off of the bar changeset. No provisions are
        # made to ensure that this is, in fact, what happens.
        if not (c and
                e.comment == c.comment and
                e.author == c.author and
                e.branch == c.branch and
                (not util.safehasattr(e, 'branchpoints') or
                 not util.safehasattr (c, 'branchpoints') or
                 e.branchpoints == c.branchpoints) and
                ((c.date[0] + c.date[1]) <=
                 (e.date[0] + e.date[1]) <=
                 (c.date[0] + c.date[1]) + fuzz) and
                e.file not in files):
            # Start a new changeset seeded from this entry.
            c = changeset(comment=e.comment, author=e.author,
                          branch=e.branch, date=e.date, entries=[],
                          mergepoint=getattr(e, 'mergepoint', None),
                          branchpoints=getattr(e, 'branchpoints', set()))
            changesets.append(c)
            files = set()
            # Progress output every 100 changesets.
            if len(changesets) % 100 == 0:
                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
                ui.status(util.ellipsis(t, 80) + '\n')

        c.entries.append(e)
        files.add(e.file)
        c.date = e.date # changeset date is date of latest commit in it

    # Mark synthetic changesets

    for c in changesets:
        # Synthetic revisions always get their own changeset, because
        # the log message includes the filename. E.g. if you add file3
        # and file4 on a branch, you get four log entries and three
        # changesets:
        # "File file3 was added on branch ..." (synthetic, 1 entry)
        # "File file4 was added on branch ..." (synthetic, 1 entry)
        # "Add file3 and file4 to fix ..." (real, 2 entries)
        # Hence the check for 1 entry here.
        c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic

    # Sort files in each changeset

    def entitycompare(l, r):
        'Mimic cvsps sorting order'
        # Compare path components; a shorter path that is a prefix of a
        # longer one sorts first (cvsps-compatible, not plain lexical).
        l = l.file.split('/')
        r = r.file.split('/')
        nl = len(l)
        nr = len(r)
        n = min(nl, nr)
        for i in range(n):
            if i + 1 == nl and nl < nr:
                return -1
            elif i + 1 == nr and nl > nr:
                return +1
            elif l[i] < r[i]:
                return -1
            elif l[i] > r[i]:
                return +1
        return 0

    # NOTE: cmp-style sort argument (Python 2 idiom used throughout this
    # file).
    for c in changesets:
        c.entries.sort(entitycompare)

    # Sort changesets by date

    def cscmp(l, r):
        # Primary order: commit time (seconds + tz offset summed, matching
        # the fuzz arithmetic above).
        d = sum(l.date) - sum(r.date)
        if d:
            return d

        # detect vendor branches and initial commits on a branch
        # (equal timestamps: order a changeset after the one holding its
        # per-file parent revision).  'le'/'re' map rcs file -> revision;
        # note local 're' shadows the regex module only inside this
        # function.
        le = {}
        for e in l.entries:
            le[e.rcs] = e.revision
        re = {}
        for e in r.entries:
            re[e.rcs] = e.revision

        d = 0
        for e in l.entries:
            if re.get(e.rcs, None) == e.parent:
                assert not d
                d = 1
                break

        for e in r.entries:
            if le.get(e.rcs, None) == e.parent:
                assert not d
                d = -1
                break

        return d

    changesets.sort(cscmp)

    # Collect tags

    globaltags = {}
    for c in changesets:
        for e in c.entries:
            for tag in e.tags:
                # remember which is the latest changeset to have this tag
                globaltags[tag] = c

    for c in changesets:
        tags = set()
        for e in c.entries:
            tags.update(e.tags)
        # remember tags only if this is the latest changeset to have it
        c.tags = sorted(tag for tag in tags if globaltags[tag] is c)

    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
    # by inserting dummy changesets with two parents, and handle
    # {{mergefrombranch BRANCHNAME}} by setting two parents.

    if mergeto is None:
        mergeto = r'{{mergetobranch ([-\w]+)}}'
    if mergeto:
        mergeto = re.compile(mergeto)

    if mergefrom is None:
        mergefrom = r'{{mergefrombranch ([-\w]+)}}'
    if mergefrom:
        mergefrom = re.compile(mergefrom)

    versions = {} # changeset index where we saw any particular file version
    branches = {} # changeset index where we saw a branch
    n = len(changesets)
    i = 0
    # Manual while loop because the mergeto handling below may insert a
    # new changeset at i + 1 and must adjust both i and n.
    while i < n:
        c = changesets[i]

        for f in c.entries:
            versions[(f.rcs, f.revision)] = i

        p = None
        if c.branch in branches:
            p = branches[c.branch]
        else:
            # first changeset on a new branch
            # the parent is a changeset with the branch in its
            # branchpoints such that it is the latest possible
            # commit without any intervening, unrelated commits.

            for candidate in xrange(i):
                if c.branch not in changesets[candidate].branchpoints:
                    if p is not None:
                        break
                    continue
                p = candidate

        c.parents = []
        if p is not None:
            p = changesets[p]

            # Ensure no changeset has a synthetic changeset as a parent.
            while p.synthetic:
                assert len(p.parents) <= 1, \
                       _('synthetic changeset cannot have multiple parents')
                if p.parents:
                    p = p.parents[0]
                else:
                    p = None
                    break

            if p is not None:
                c.parents.append(p)

        if c.mergepoint:
            if c.mergepoint == 'HEAD':
                c.mergepoint = None
            c.parents.append(changesets[branches[c.mergepoint]])

        if mergefrom:
            m = mergefrom.search(c.comment)
            if m:
                m = m.group(1)
                if m == 'HEAD':
                    m = None
                try:
                    candidate = changesets[branches[m]]
                except KeyError:
                    # Unknown branch: warn only.  'candidate' stays unbound
                    # here, but the guard below short-circuits on
                    # 'm in branches' (also false), so it is never read.
                    ui.warn(_("warning: CVS commit message references "
                              "non-existent branch %r:\n%s\n")
                            % (m, c.comment))
                if m in branches and c.branch != m and not candidate.synthetic:
                    c.parents.append(candidate)

        if mergeto:
            m = mergeto.search(c.comment)
            if m:
                if m.groups():
                    m = m.group(1)
                    if m == 'HEAD':
                        m = None
                else:
                    m = None # if no group found then merge to HEAD
                if m in branches and c.branch != m:
                    # insert empty changeset for merge
                    cc = changeset(
                        author=c.author, branch=m, date=c.date,
                        comment='convert-repo: CVS merge from branch %s'
                                % c.branch,
                        entries=[], tags=[],
                        parents=[changesets[branches[m]], c])
                    changesets.insert(i + 1, cc)
                    branches[m] = i + 1

                    # adjust our loop counters now we have inserted a new entry
                    n += 1
                    i += 2
                    continue

        branches[c.branch] = i
        i += 1

    # Drop synthetic changesets (safe now that we have ensured no other
    # changesets can have them as parents).
    i = 0
    while i < len(changesets):
        if changesets[i].synthetic:
            del changesets[i]
        else:
            i += 1

    # Number changesets

    # Ids are 1-based list positions, assigned only after synthetic
    # changesets have been dropped.
    for i, c in enumerate(changesets):
        c.id = i + 1

    ui.status(_('%d changeset entries\n') % len(changesets))

    hook.hook(ui, None, "cvschangesets", True, changesets=changesets)

    return changesets
753
753
754
754
def debugcvsps(ui, *args, **opts):
    '''Read CVS rlog for current directory or named path in
    repository, and convert the log to changesets based on matching
    commit log entries and dates.
    '''
    # Cache mode for the raw rlog data: "write" rebuilds the cache,
    # "update" refreshes it, None disables caching entirely.
    if opts["new_cache"]:
        cache = "write"
    elif opts["update_cache"]:
        cache = "update"
    else:
        cache = None

    revisions = opts["revisions"]

    try:
        if args:
            # Concatenate the logs of every named path.
            log = []
            for d in args:
                log += createlog(ui, d, root=opts["root"], cache=cache)
        else:
            log = createlog(ui, root=opts["root"], cache=cache)
    except logerror, e:
        ui.write("%r\n"%e)
        return

    changesets = createchangeset(ui, log, opts["fuzz"])
    del log

    # Print changesets (optionally filtered)

    # 'off' is truthy while we have not yet reached the first requested
    # revision/tag (only when -r limits were given); nothing is printed
    # while it is set.
    off = len(revisions)
    branches = {} # latest version number in each branch
    ancestors = {} # parent branch
    for cs in changesets:

        if opts["ancestors"]:
            # Record, per branch, where it forked off its parent branch
            # and the latest changeset id seen on it.
            if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
                                        cs.parents[0].id)
            branches[cs.branch] = cs.id

        # limit by branches
        if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            # bug-for-bug compatibility with cvsps.
            ui.write('---------------------\n')
            ui.write('PatchSet %d \n' % cs.id)
            ui.write('Date: %s\n' % util.datestr(cs.date,
                                                 '%Y/%m/%d %H:%M:%S %1%2'))
            ui.write('Author: %s\n' % cs.author)
            ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
            ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
                                  ','.join(cs.tags) or '(none)'))
            branchpoints = getattr(cs, 'branchpoints', None)
            if branchpoints:
                ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
            if opts["parents"] and cs.parents:
                if len(cs.parents) > 1:
                    ui.write('Parents: %s\n' %
                             (','.join([str(p.id) for p in cs.parents])))
                else:
                    ui.write('Parent: %d\n' % cs.parents[0].id)

            if opts["ancestors"]:
                # Walk up the branch ancestry recorded above and print
                # each ancestor as branch:forkpoint:latest.
                b = cs.branch
                r = []
                while b:
                    b, c = ancestors[b]
                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
                if r:
                    ui.write('Ancestors: %s\n' % (','.join(r)))

            ui.write('Log:\n')
            ui.write('%s\n\n' % cs.comment)
            ui.write('Members: \n')
            for f in cs.entries:
                fn = f.file
                # Strip the user-supplied path prefix from file names.
                if fn.startswith(opts["prefix"]):
                    fn = fn[len(opts["prefix"]):]
                ui.write('\t%s:%s->%s%s \n' % (
                        fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
                        '.'.join([str(x) for x in f.revision]),
                        ['', '(DEAD)'][f.dead]))
            ui.write('\n')

        # have we seen the start tag?
        if revisions and off:
            if revisions[0] == str(cs.id) or \
                revisions[0] in cs.tags:
                off = False

        # see if we reached the end tag
        if len(revisions) > 1 and not off:
            if revisions[1] == str(cs.id) or \
                revisions[1] in cs.tags:
                break
@@ -1,360 +1,360 b''
1 # monotone.py - monotone support for the convert extension
1 # monotone.py - monotone support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
4 # others
4 # others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os, re
9 import os, re
10 from mercurial import util
10 from mercurial import util
11 from common import NoRepo, commit, converter_source, checktool
11 from common import NoRepo, commit, converter_source, checktool
12 from common import commandline
12 from common import commandline
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14
14
class monotone_source(converter_source, commandline):
    """Convert-extension source backend reading a monotone repository,
    either a checkout (_MTN directory) or a raw SQLite database."""

    def __init__(self, ui, path=None, rev=None):
        converter_source.__init__(self, ui, path, rev)
        commandline.__init__(self, ui, 'mtn')

        self.ui = ui
        self.path = path
        # Whether the long-running 'automate stdio' protocol is in use;
        # enabled later by before() when mtn is new enough.
        self.automatestdio = False
        self.rev = rev

        norepo = NoRepo(_("%s does not look like a monotone repository")
                        % path)
        if not os.path.exists(os.path.join(path, '_MTN')):
            # Could be a monotone repository (SQLite db file)
            try:
                f = file(path, 'rb')
                header = f.read(16)
                f.close()
            except IOError:
                header = ''
            if header != 'SQLite format 3\x00':
                raise norepo

        # regular expressions for parsing monotone output
        space = r'\s*'
        name = r'\s+"((?:\\"|[^"])*)"\s*'
        value = name
        revision = r'\s+\[(\w+)\]\s*'
        lines = r'(?:.|\n)+'

        self.dir_re = re.compile(space + "dir" + name)
        self.file_re = re.compile(space + "file" + name +
                                  "content" + revision)
        self.add_file_re = re.compile(space + "add_file" + name +
                                      "content" + revision)
        self.patch_re = re.compile(space + "patch" + name +
                                   "from" + revision + "to" + revision)
        self.rename_re = re.compile(space + "rename" + name + "to" + name)
        self.delete_re = re.compile(space + "delete" + name)
        self.tag_re = re.compile(space + "tag" + name + "revision" +
                                 revision)
        self.cert_re = re.compile(lines + space + "name" + name +
                                  "value" + value)

        attr = space + "file" + lines + space + "attr" + space
        self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
                                          space + '"true"')

        # cached data (filled lazily by mtnloadmanifest)
        self.manifest_rev = None
        self.manifest = None
        self.files = None
        self.dirs = None

        checktool('mtn', abort=False)
70
70
71 def mtnrun(self, *args, **kwargs):
71 def mtnrun(self, *args, **kwargs):
72 if self.automatestdio:
72 if self.automatestdio:
73 return self.mtnrunstdio(*args, **kwargs)
73 return self.mtnrunstdio(*args, **kwargs)
74 else:
74 else:
75 return self.mtnrunsingle(*args, **kwargs)
75 return self.mtnrunsingle(*args, **kwargs)
76
76
77 def mtnrunsingle(self, *args, **kwargs):
77 def mtnrunsingle(self, *args, **kwargs):
78 kwargs['d'] = self.path
78 kwargs['d'] = self.path
79 return self.run0('automate', *args, **kwargs)
79 return self.run0('automate', *args, **kwargs)
80
80
81 def mtnrunstdio(self, *args, **kwargs):
81 def mtnrunstdio(self, *args, **kwargs):
82 # Prepare the command in automate stdio format
82 # Prepare the command in automate stdio format
83 command = []
83 command = []
84 for k, v in kwargs.iteritems():
84 for k, v in kwargs.iteritems():
85 command.append("%s:%s" % (len(k), k))
85 command.append("%s:%s" % (len(k), k))
86 if v:
86 if v:
87 command.append("%s:%s" % (len(v), v))
87 command.append("%s:%s" % (len(v), v))
88 if command:
88 if command:
89 command.insert(0, 'o')
89 command.insert(0, 'o')
90 command.append('e')
90 command.append('e')
91
91
92 command.append('l')
92 command.append('l')
93 for arg in args:
93 for arg in args:
94 command += "%s:%s" % (len(arg), arg)
94 command += "%s:%s" % (len(arg), arg)
95 command.append('e')
95 command.append('e')
96 command = ''.join(command)
96 command = ''.join(command)
97
97
98 self.ui.debug("mtn: sending '%s'\n" % command)
98 self.ui.debug("mtn: sending '%s'\n" % command)
99 self.mtnwritefp.write(command)
99 self.mtnwritefp.write(command)
100 self.mtnwritefp.flush()
100 self.mtnwritefp.flush()
101
101
102 return self.mtnstdioreadcommandoutput(command)
102 return self.mtnstdioreadcommandoutput(command)
103
103
104 def mtnstdioreadpacket(self):
104 def mtnstdioreadpacket(self):
105 read = None
105 read = None
106 commandnbr = ''
106 commandnbr = ''
107 while read != ':':
107 while read != ':':
108 read = self.mtnreadfp.read(1)
108 read = self.mtnreadfp.read(1)
109 if not read:
109 if not read:
110 raise util.Abort(_('bad mtn packet - no end of commandnbr'))
110 raise util.Abort(_('bad mtn packet - no end of commandnbr'))
111 commandnbr += read
111 commandnbr += read
112 commandnbr = commandnbr[:-1]
112 commandnbr = commandnbr[:-1]
113
113
114 stream = self.mtnreadfp.read(1)
114 stream = self.mtnreadfp.read(1)
115 if stream not in 'mewptl':
115 if stream not in 'mewptl':
116 raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
116 raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
117
117
118 read = self.mtnreadfp.read(1)
118 read = self.mtnreadfp.read(1)
119 if read != ':':
119 if read != ':':
120 raise util.Abort(_('bad mtn packet - no divider before size'))
120 raise util.Abort(_('bad mtn packet - no divider before size'))
121
121
122 read = None
122 read = None
123 lengthstr = ''
123 lengthstr = ''
124 while read != ':':
124 while read != ':':
125 read = self.mtnreadfp.read(1)
125 read = self.mtnreadfp.read(1)
126 if not read:
126 if not read:
127 raise util.Abort(_('bad mtn packet - no end of packet size'))
127 raise util.Abort(_('bad mtn packet - no end of packet size'))
128 lengthstr += read
128 lengthstr += read
129 try:
129 try:
130 length = long(lengthstr[:-1])
130 length = long(lengthstr[:-1])
131 except TypeError:
131 except TypeError:
132 raise util.Abort(_('bad mtn packet - bad packet size %s')
132 raise util.Abort(_('bad mtn packet - bad packet size %s')
133 % lengthstr)
133 % lengthstr)
134
134
135 read = self.mtnreadfp.read(length)
135 read = self.mtnreadfp.read(length)
136 if len(read) != length:
136 if len(read) != length:
137 raise util.Abort(_("bad mtn packet - unable to read full packet "
137 raise util.Abort(_("bad mtn packet - unable to read full packet "
138 "read %s of %s") % (len(read), length))
138 "read %s of %s") % (len(read), length))
139
139
140 return (commandnbr, stream, length, read)
140 return (commandnbr, stream, length, read)
141
141
142 def mtnstdioreadcommandoutput(self, command):
142 def mtnstdioreadcommandoutput(self, command):
143 retval = []
143 retval = []
144 while True:
144 while True:
145 commandnbr, stream, length, output = self.mtnstdioreadpacket()
145 commandnbr, stream, length, output = self.mtnstdioreadpacket()
146 self.ui.debug('mtn: read packet %s:%s:%s\n' %
146 self.ui.debug('mtn: read packet %s:%s:%s\n' %
147 (commandnbr, stream, length))
147 (commandnbr, stream, length))
148
148
149 if stream == 'l':
149 if stream == 'l':
150 # End of command
150 # End of command
151 if output != '0':
151 if output != '0':
152 raise util.Abort(_("mtn command '%s' returned %s") %
152 raise util.Abort(_("mtn command '%s' returned %s") %
153 (command, output))
153 (command, output))
154 break
154 break
155 elif stream in 'ew':
155 elif stream in 'ew':
156 # Error, warning output
156 # Error, warning output
157 self.ui.warn(_('%s error:\n') % self.command)
157 self.ui.warn(_('%s error:\n') % self.command)
158 self.ui.warn(output)
158 self.ui.warn(output)
159 elif stream == 'p':
159 elif stream == 'p':
160 # Progress messages
160 # Progress messages
161 self.ui.debug('mtn: ' + output)
161 self.ui.debug('mtn: ' + output)
162 elif stream == 'm':
162 elif stream == 'm':
163 # Main stream - command output
163 # Main stream - command output
164 retval.append(output)
164 retval.append(output)
165
165
166 return ''.join(retval)
166 return ''.join(retval)
167
167
168 def mtnloadmanifest(self, rev):
168 def mtnloadmanifest(self, rev):
169 if self.manifest_rev == rev:
169 if self.manifest_rev == rev:
170 return
170 return
171 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
171 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
172 self.manifest_rev = rev
172 self.manifest_rev = rev
173 self.files = {}
173 self.files = {}
174 self.dirs = {}
174 self.dirs = {}
175
175
176 for e in self.manifest:
176 for e in self.manifest:
177 m = self.file_re.match(e)
177 m = self.file_re.match(e)
178 if m:
178 if m:
179 attr = ""
179 attr = ""
180 name = m.group(1)
180 name = m.group(1)
181 node = m.group(2)
181 node = m.group(2)
182 if self.attr_execute_re.match(e):
182 if self.attr_execute_re.match(e):
183 attr += "x"
183 attr += "x"
184 self.files[name] = (node, attr)
184 self.files[name] = (node, attr)
185 m = self.dir_re.match(e)
185 m = self.dir_re.match(e)
186 if m:
186 if m:
187 self.dirs[m.group(1)] = True
187 self.dirs[m.group(1)] = True
188
188
189 def mtnisfile(self, name, rev):
189 def mtnisfile(self, name, rev):
190 # a non-file could be a directory or a deleted or renamed file
190 # a non-file could be a directory or a deleted or renamed file
191 self.mtnloadmanifest(rev)
191 self.mtnloadmanifest(rev)
192 return name in self.files
192 return name in self.files
193
193
194 def mtnisdir(self, name, rev):
194 def mtnisdir(self, name, rev):
195 self.mtnloadmanifest(rev)
195 self.mtnloadmanifest(rev)
196 return name in self.dirs
196 return name in self.dirs
197
197
198 def mtngetcerts(self, rev):
198 def mtngetcerts(self, rev):
199 certs = {"author":"<missing>", "date":"<missing>",
199 certs = {"author":"<missing>", "date":"<missing>",
200 "changelog":"<missing>", "branch":"<missing>"}
200 "changelog":"<missing>", "branch":"<missing>"}
201 certlist = self.mtnrun("certs", rev)
201 certlist = self.mtnrun("certs", rev)
202 # mtn < 0.45:
202 # mtn < 0.45:
203 # key "test@selenic.com"
203 # key "test@selenic.com"
204 # mtn >= 0.45:
204 # mtn >= 0.45:
205 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
205 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
206 certlist = re.split('\n\n key ["\[]', certlist)
206 certlist = re.split('\n\n key ["\[]', certlist)
207 for e in certlist:
207 for e in certlist:
208 m = self.cert_re.match(e)
208 m = self.cert_re.match(e)
209 if m:
209 if m:
210 name, value = m.groups()
210 name, value = m.groups()
211 value = value.replace(r'\"', '"')
211 value = value.replace(r'\"', '"')
212 value = value.replace(r'\\', '\\')
212 value = value.replace(r'\\', '\\')
213 certs[name] = value
213 certs[name] = value
214 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
214 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
215 # and all times are stored in UTC
215 # and all times are stored in UTC
216 certs["date"] = certs["date"].split('.')[0] + " UTC"
216 certs["date"] = certs["date"].split('.')[0] + " UTC"
217 return certs
217 return certs
218
218
219 # implement the converter_source interface:
219 # implement the converter_source interface:
220
220
221 def getheads(self):
221 def getheads(self):
222 if not self.rev:
222 if not self.rev:
223 return self.mtnrun("leaves").splitlines()
223 return self.mtnrun("leaves").splitlines()
224 else:
224 else:
225 return [self.rev]
225 return [self.rev]
226
226
    def getchanges(self, rev):
        """Return (files, copies) for revision 'rev'.

        files is a list of (filename, rev) pairs for every file touched
        by the revision, including rename/delete sources so the convert
        engine can detect removals via getfile raising IOError; copies
        maps rename destinations to their sources.
        """
        #revision = self.mtncmd("get_revision %s" % rev).split("\n\n")
        revision = self.mtnrun("get_revision", rev).split("\n\n")
        files = {}
        ignoremove = {}
        renameddirs = []
        copies = {}
        for e in revision:
            m = self.add_file_re.match(e)
            if m:
                files[m.group(1)] = rev
                ignoremove[m.group(1)] = rev
            m = self.patch_re.match(e)
            if m:
                files[m.group(1)] = rev
            # Delete/rename is handled later when the convert engine
            # discovers an IOError exception from getfile,
            # but only if we add the "from" file to the list of changes.
            m = self.delete_re.match(e)
            if m:
                files[m.group(1)] = rev
            m = self.rename_re.match(e)
            if m:
                toname = m.group(2)
                fromname = m.group(1)
                # A file rename becomes a copy plus an implicit delete of
                # the source; a directory rename is deferred until all
                # new files of the revision are known.
                if self.mtnisfile(toname, rev):
                    ignoremove[toname] = 1
                    copies[toname] = fromname
                    files[toname] = rev
                    files[fromname] = rev
                elif self.mtnisdir(toname, rev):
                    renameddirs.append((fromname, toname))

        # Directory renames can be handled only once we have recorded
        # all new files
        for fromdir, todir in renameddirs:
            renamed = {}
            for tofile in self.files:
                if tofile in ignoremove:
                    continue
                if tofile.startswith(todir + '/'):
                    renamed[tofile] = fromdir + tofile[len(todir):]
                    # Avoid chained moves like:
                    # d1(/a) => d3/d1(/a)
                    # d2 => d3
                    ignoremove[tofile] = 1
            for tofile, fromfile in renamed.items():
                self.ui.debug (_("copying file in renamed directory "
                                 "from '%s' to '%s'")
                               % (fromfile, tofile), '\n')
                files[tofile] = rev
                copies[tofile] = fromfile
            for fromfile in renamed.values():
                files[fromfile] = rev

        return (files.items(), copies)
283
283
284 def getfile(self, name, rev):
284 def getfile(self, name, rev):
285 if not self.mtnisfile(name, rev):
285 if not self.mtnisfile(name, rev):
286 raise IOError # file was deleted or renamed
286 raise IOError # file was deleted or renamed
287 try:
287 try:
288 data = self.mtnrun("get_file_of", name, r=rev)
288 data = self.mtnrun("get_file_of", name, r=rev)
289 except:
289 except:
290 raise IOError # file was deleted or renamed
290 raise IOError # file was deleted or renamed
291 self.mtnloadmanifest(rev)
291 self.mtnloadmanifest(rev)
292 node, attr = self.files.get(name, (None, ""))
292 node, attr = self.files.get(name, (None, ""))
293 return data, attr
293 return data, attr
294
294
295 def getcommit(self, rev):
295 def getcommit(self, rev):
296 extra = {}
296 extra = {}
297 certs = self.mtngetcerts(rev)
297 certs = self.mtngetcerts(rev)
298 if certs.get('suspend') == certs["branch"]:
298 if certs.get('suspend') == certs["branch"]:
299 extra['close'] = '1'
299 extra['close'] = '1'
300 return commit(
300 return commit(
301 author=certs["author"],
301 author=certs["author"],
302 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
302 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
303 desc=certs["changelog"],
303 desc=certs["changelog"],
304 rev=rev,
304 rev=rev,
305 parents=self.mtnrun("parents", rev).splitlines(),
305 parents=self.mtnrun("parents", rev).splitlines(),
306 branch=certs["branch"],
306 branch=certs["branch"],
307 extra=extra)
307 extra=extra)
308
308
309 def gettags(self):
309 def gettags(self):
310 tags = {}
310 tags = {}
311 for e in self.mtnrun("tags").split("\n\n"):
311 for e in self.mtnrun("tags").split("\n\n"):
312 m = self.tag_re.match(e)
312 m = self.tag_re.match(e)
313 if m:
313 if m:
314 tags[m.group(1)] = m.group(2)
314 tags[m.group(1)] = m.group(2)
315 return tags
315 return tags
316
316
317 def getchangedfiles(self, rev, i):
317 def getchangedfiles(self, rev, i):
318 # This function is only needed to support --filemap
318 # This function is only needed to support --filemap
319 # ... and we don't support that
319 # ... and we don't support that
320 raise NotImplementedError
320 raise NotImplementedError
321
321
    def before(self):
        """Probe the mtn automate interface version and, when new enough
        (automate >= 12.0, i.e. mtn >= 0.46), start a long-running
        'automate stdio' child process used for all later commands."""
        # Check if we have a new enough version to use automate stdio
        version = 0.0
        try:
            versionstr = self.mtnrunsingle("interface_version")
            version = float(versionstr)
        except Exception:
            raise util.Abort(_("unable to determine mtn automate interface "
                               "version"))

        if version >= 12.0:
            self.automatestdio = True
            self.ui.debug("mtn automate version %s - using automate stdio\n" %
                          version)

            # launch the long-running automate stdio process
            self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
                                                         '-d', self.path)
            # read the headers
            read = self.mtnreadfp.readline()
            if read != 'format-version: 2\n':
                raise util.Abort(_('mtn automate stdio header unexpected: %s')
                                 % read)
            # Headers end at the first blank line; everything before it
            # is consumed and ignored.
            while read != '\n':
                read = self.mtnreadfp.readline()
                if not read:
                    raise util.Abort(_("failed to reach end of mtn automate "
                                       "stdio headers"))
        else:
            self.ui.debug("mtn automate version %s - not using automate stdio "
                          "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
353
353
354 def after(self):
354 def after(self):
355 if self.automatestdio:
355 if self.automatestdio:
356 self.mtnwritefp.close()
356 self.mtnwritefp.close()
357 self.mtnwritefp = None
357 self.mtnwritefp = None
358 self.mtnreadfp.close()
358 self.mtnreadfp.close()
359 self.mtnreadfp = None
359 self.mtnreadfp = None
360
360
@@ -1,1252 +1,1252 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os, re, sys, tempfile, urllib, urllib2, xml.dom.minidom
5 import os, re, sys, tempfile, urllib, urllib2, xml.dom.minidom
6 import cPickle as pickle
6 import cPickle as pickle
7
7
8 from mercurial import strutil, scmutil, util, encoding
8 from mercurial import strutil, scmutil, util, encoding
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10
10
11 propertycache = util.propertycache
11 propertycache = util.propertycache
12
12
13 # Subversion stuff. Works best with very recent Python SVN bindings
13 # Subversion stuff. Works best with very recent Python SVN bindings
14 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
14 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
15 # these bindings.
15 # these bindings.
16
16
17 from cStringIO import StringIO
17 from cStringIO import StringIO
18
18
19 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
19 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
20 from common import commandline, converter_source, converter_sink, mapfile
20 from common import commandline, converter_source, converter_sink, mapfile
21
21
22 try:
22 try:
23 from svn.core import SubversionException, Pool
23 from svn.core import SubversionException, Pool
24 import svn
24 import svn
25 import svn.client
25 import svn.client
26 import svn.core
26 import svn.core
27 import svn.ra
27 import svn.ra
28 import svn.delta
28 import svn.delta
29 import transport
29 import transport
30 import warnings
30 import warnings
31 warnings.filterwarnings('ignore',
31 warnings.filterwarnings('ignore',
32 module='svn.core',
32 module='svn.core',
33 category=DeprecationWarning)
33 category=DeprecationWarning)
34
34
35 except ImportError:
35 except ImportError:
36 svn = None
36 svn = None
37
37
class SvnPathNotFound(Exception):
    """Raised when a path cannot be found in the Subversion repository."""
40
40
def revsplit(rev):
    """Split a revision id 'svn:<uuid><module>@<revnum>' into its
    (uuid, module, revnum) components."""
    url, revnum = rev.rsplit('@', 1)
    # The first component carries an 'svn:' prefix followed by the uuid;
    # the module, when present, is everything from the first slash on.
    pieces = url.split('/', 1)
    module = ''
    if len(pieces) == 2:
        module = '/' + pieces[1]
    return pieces[0][4:], module, int(revnum)
48 return parts[0][4:], mod, int(revnum)
49
49
def quote(s):
    """URL-encode *s* the same way Subversion itself does.

    As of svn 1.7, many svn calls expect "canonical" paths.  In theory we
    should call svn.core.*canonicalize() on all paths before passing them
    to the API.  Instead, we assume the base url is canonical and mimic
    svn's own URL encoding function so new components can be appended
    safely.  The "safe" characters were taken from the
    "svn_uri__char_validity" table in libsvn_subr/path.c.
    """
    safe = "!$&'()*+,-./:=@_~"
    return urllib.quote(s, safe)
58 return urllib.quote(s, "!$&'()*+,-./:=@_~")
59
59
def geturl(path):
    """Map a local path or URL to its canonical Subversion URL."""
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # svn.client.url_from_path() fails with local repositories
        pass
    if os.path.isdir(path):
        fspath = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            fspath = '/' + util.normpath(fspath)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        fspath = encoding.tolocal(fspath)
        path = 'file://%s' % quote(fspath)
    return svn.core.svn_path_canonicalize(path)
74 return svn.core.svn_path_canonicalize(path)
75
75
def optrev(number):
    """Wrap *number* in an svn_opt_revision_t selecting that revision."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
80 return optrev
81
81
class changedpath(object):
    """Plain copy of the interesting fields of an svn changed-path entry
    (used so log entries can be pickled in get_log_child)."""
    def __init__(self, p):
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
87
87
88 def get_log_child(fp, url, paths, start, end, limit=0,
88 def get_log_child(fp, url, paths, start, end, limit=0,
89 discover_changed_paths=True, strict_node_history=False):
89 discover_changed_paths=True, strict_node_history=False):
90 protocol = -1
90 protocol = -1
91 def receiver(orig_paths, revnum, author, date, message, pool):
91 def receiver(orig_paths, revnum, author, date, message, pool):
92 if orig_paths is not None:
92 if orig_paths is not None:
93 for k, v in orig_paths.iteritems():
93 for k, v in orig_paths.iteritems():
94 orig_paths[k] = changedpath(v)
94 orig_paths[k] = changedpath(v)
95 pickle.dump((orig_paths, revnum, author, date, message),
95 pickle.dump((orig_paths, revnum, author, date, message),
96 fp, protocol)
96 fp, protocol)
97
97
98 try:
98 try:
99 # Use an ra of our own so that our parent can consume
99 # Use an ra of our own so that our parent can consume
100 # our results without confusing the server.
100 # our results without confusing the server.
101 t = transport.SvnRaTransport(url=url)
101 t = transport.SvnRaTransport(url=url)
102 svn.ra.get_log(t.ra, paths, start, end, limit,
102 svn.ra.get_log(t.ra, paths, start, end, limit,
103 discover_changed_paths,
103 discover_changed_paths,
104 strict_node_history,
104 strict_node_history,
105 receiver)
105 receiver)
106 except IOError:
106 except IOError:
107 # Caller may interrupt the iteration
107 # Caller may interrupt the iteration
108 pickle.dump(None, fp, protocol)
108 pickle.dump(None, fp, protocol)
109 except Exception, inst:
109 except Exception, inst:
110 pickle.dump(str(inst), fp, protocol)
110 pickle.dump(str(inst), fp, protocol)
111 else:
111 else:
112 pickle.dump(None, fp, protocol)
112 pickle.dump(None, fp, protocol)
113 fp.close()
113 fp.close()
114 # With large history, cleanup process goes crazy and suddenly
114 # With large history, cleanup process goes crazy and suddenly
115 # consumes *huge* amount of memory. The output file being closed,
115 # consumes *huge* amount of memory. The output file being closed,
116 # there is no need for clean termination.
116 # there is no need for clean termination.
117 os._exit(0)
117 os._exit(0)
118
118
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # Arguments arrive encoded on stdin; entries leave pickled on stdout.
    for fp in (sys.stdin, sys.stdout):
        util.setbinary(fp)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
127
127
class logstream(object):
    """Interruptible revision log iterator."""

    def __init__(self, stdout):
        # Pipe connected to the debugsvnlog child's stdout.
        self._stdout = stdout

    def __iter__(self):
        """Yield (paths, revnum, author, date, message) entries until the
        child writes its None end-of-stream marker."""
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            if entry is None:
                # end-of-stream sentinel written by get_log_child
                break
            try:
                orig_paths, revnum, author, date, message = entry
            except (TypeError, ValueError):
                # the child pickled an error string instead of an entry
                raise util.Abort(_("log stream exception '%s'") % entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
152
152
153
153
154 # Check to see if the given path is a local Subversion repo. Verify this by
154 # Check to see if the given path is a local Subversion repo. Verify this by
155 # looking for several svn-specific files and directories in the given
155 # looking for several svn-specific files and directories in the given
156 # directory.
156 # directory.
def filecheck(ui, path, proto):
    """Return True when *path* contains the filesystem entries that mark
    the root of a local Subversion repository."""
    for name in 'locks', 'hooks', 'format', 'db':
        if not os.path.exists(os.path.join(path, name)):
            return False
    return True
162
162
163 # Check to see if a given path is the root of an svn repo over http. We verify
163 # Check to see if a given path is the root of an svn repo over http. We verify
164 # this by requesting a version-controlled URL we know can't exist and looking
164 # this by requesting a version-controlled URL we know can't exist and looking
165 # for the svn-specific "not found" XML.
165 # for the svn-specific "not found" XML.
166 def httpcheck(ui, path, proto):
166 def httpcheck(ui, path, proto):
167 try:
167 try:
168 opener = urllib2.build_opener()
168 opener = urllib2.build_opener()
169 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
169 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
170 data = rsp.read()
170 data = rsp.read()
171 except urllib2.HTTPError, inst:
171 except urllib2.HTTPError, inst:
172 if inst.code != 404:
172 if inst.code != 404:
173 # Except for 404 we cannot know for sure this is not an svn repo
173 # Except for 404 we cannot know for sure this is not an svn repo
174 ui.warn(_('svn: cannot probe remote repository, assume it could '
174 ui.warn(_('svn: cannot probe remote repository, assume it could '
175 'be a subversion repository. Use --source-type if you '
175 'be a subversion repository. Use --source-type if you '
176 'know better.\n'))
176 'know better.\n'))
177 return True
177 return True
178 data = inst.fp.read()
178 data = inst.fp.read()
179 except:
179 except:
180 # Could be urllib2.URLError if the URL is invalid or anything else.
180 # Could be urllib2.URLError if the URL is invalid or anything else.
181 return False
181 return False
182 return '<m:human-readable errcode="160013">' in data
182 return '<m:human-readable errcode="160013">' in data
183
183
# Map each supported URL scheme to the probe able to recognize a
# Subversion repository served over it.
protomap = {
    'http': httpcheck,
    'https': httpcheck,
    'file': filecheck,
}
def issvnurl(ui, url):
    """Return True if *url* (or one of its parent directories) points at
    a Subversion repository over a supported protocol."""
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        # No scheme: treat the argument as a local filesystem path.
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = util.pconvert(path)
    check = protomap.get(proto)
    if check is None:
        # Unsupported protocol, cannot possibly be an svn repository.
        return False
    # Walk up the path until a repository root is recognized.
    while '/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
204
204
205 # SVN conversion code stolen from bzr-svn and tailor
205 # SVN conversion code stolen from bzr-svn and tailor
206 #
206 #
207 # Subversion looks like a versioned filesystem, branches structures
207 # Subversion looks like a versioned filesystem, branches structures
208 # are defined by conventions and not enforced by the tool. First,
208 # are defined by conventions and not enforced by the tool. First,
209 # we define the potential branches (modules) as "trunk" and "branches"
209 # we define the potential branches (modules) as "trunk" and "branches"
210 # children directories. Revisions are then identified by their
210 # children directories. Revisions are then identified by their
211 # module and revision number (and a repository identifier).
211 # module and revision number (and a repository identifier).
212 #
212 #
213 # The revision graph is really a tree (or a forest). By default, a
213 # The revision graph is really a tree (or a forest). By default, a
214 # revision parent is the previous revision in the same module. If the
214 # revision parent is the previous revision in the same module. If the
215 # module directory is copied/moved from another module then the
215 # module directory is copied/moved from another module then the
216 # revision is the module root and its parent the source revision in
216 # revision is the module root and its parent the source revision in
217 # the parent module. A revision has at most one parent.
217 # the parent module. A revision has at most one parent.
218 #
218 #
219 class svn_source(converter_source):
219 class svn_source(converter_source):
def __init__(self, ui, url, rev=None):
    """Open the Subversion repository at *url* as a conversion source.

    rev, when given, must be an integer revision number and limits the
    conversion to revisions up to it.
    """
    super(svn_source, self).__init__(ui, url, rev=rev)

    looksvn = (url.startswith('svn://')
               or url.startswith('svn+ssh://')
               or (os.path.exists(url)
                   and os.path.exists(os.path.join(url, '.svn')))
               or issvnurl(ui, url))
    if not looksvn:
        raise NoRepo(_("%s does not look like a Subversion repository")
                     % url)
    if svn is None:
        raise MissingTool(_('Could not load Subversion python bindings'))

    try:
        version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
        if version < (1, 4):
            raise MissingTool(_('Subversion python bindings %d.%d found, '
                                '1.4 or later required') % version)
    except AttributeError:
        # Very old bindings do not even define the version constants.
        raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                            'or later required'))

    self.lastrevs = {}

    latest = None
    try:
        # Support file://path@rev syntax. Useful e.g. to convert
        # deleted branches.
        at = url.rfind('@')
        if at >= 0:
            latest = int(url[at + 1:])
            url = url[:at]
    except ValueError:
        pass
    self.url = geturl(url)
    self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
    try:
        self.transport = transport.SvnRaTransport(url=self.url)
        self.ra = self.transport.ra
        self.ctx = self.transport.client
        self.baseurl = svn.ra.get_repos_root(self.ra)
        # Module is either empty or a repository path starting with
        # a slash and not ending with a slash.
        self.module = urllib.unquote(self.url[len(self.baseurl):])
        self.prevmodule = None
        self.rootmodule = self.module
        self.commits = {}
        self.paths = {}
        self.uuid = svn.ra.get_uuid(self.ra)
    except SubversionException:
        ui.traceback()
        raise NoRepo(_("%s does not look like a Subversion repository")
                     % self.url)

    if rev:
        try:
            latest = int(rev)
        except ValueError:
            raise util.Abort(_('svn: revision %s is not an integer') % rev)

    self.trunkname = self.ui.config('convert', 'svn.trunk',
                                    'trunk').strip('/')
    self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
    try:
        self.startrev = int(self.startrev)
        if self.startrev < 0:
            self.startrev = 0
    except ValueError:
        raise util.Abort(_('svn: start revision %s is not an integer')
                         % self.startrev)

    try:
        self.head = self.latest(self.module, latest)
    except SvnPathNotFound:
        self.head = None
    if not self.head:
        raise util.Abort(_('no revision found in module %s')
                         % self.module)
    self.last_changed = self.revnum(self.head)

    self._changescache = None

    # Remember the working copy path (if any) so a shamap can be kept
    # alongside it by callers.
    if os.path.exists(os.path.join(url, '.svn/entries')):
        self.wc = url
    else:
        self.wc = None
    self.convertfp = None
306
306
def setrevmap(self, revmap):
    """Record, per module, the highest revision number already converted."""
    lastrevs = {}
    for revid in revmap.iterkeys():
        uuid, module, revnum = revsplit(revid)
        seen = lastrevs.get(module)
        if seen is None or revnum > seen:
            lastrevs[module] = revnum
    self.lastrevs = lastrevs
315
315
def exists(self, path, optrev):
    """Return True when *path* exists in the repository at *optrev*."""
    url = self.url.rstrip('/') + '/' + quote(path)
    try:
        svn.client.ls(url, optrev, False, self.ctx)
    except SubversionException:
        return False
    return True
323
323
def getheads(self):
    """Return the list of head revision ids, the trunk's head first."""

    def isdir(path, revnum):
        # True when path is a directory in the repository at revnum.
        kind = self._checkpath(path, revnum)
        return kind == svn.core.svn_node_dir

    def getcfgpath(name, rev):
        # Resolve the convert.svn.<name> layout option to an existing
        # repository path, or None when disabled or absent.
        cfgpath = self.ui.config('convert', 'svn.' + name)
        if cfgpath is not None and cfgpath.strip() == '':
            return None
        path = (cfgpath or name).strip('/')
        if not self.exists(path, rev):
            if self.module.endswith(path) and name == 'trunk':
                # we are converting from inside this directory
                return None
            if cfgpath:
                raise util.Abort(_('expected %s to be at %r, but not found')
                                 % (name, path))
            return None
        self.ui.note(_('found %s at %r\n') % (name, path))
        return path

    rev = optrev(self.last_changed)
    oldmodule = ''
    trunk = getcfgpath('trunk', rev)
    self.tags = getcfgpath('tags', rev)
    branches = getcfgpath('branches', rev)

    # If the project has a trunk or branches, we will extract heads
    # from them. We keep the project root otherwise.
    if trunk:
        oldmodule = self.module or ''
        self.module += '/' + trunk
        self.head = self.latest(self.module, self.last_changed)
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)

    # First head in the list is the module's head
    self.heads = [self.head]
    if self.tags is not None:
        self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

    # Check if branches bring a few more heads to the list
    if branches:
        rpath = self.url.strip('/')
        branchnames = svn.client.ls(rpath + '/' + quote(branches),
                                    rev, False, self.ctx)
        for branch in branchnames.keys():
            module = '%s/%s/%s' % (oldmodule, branches, branch)
            if not isdir(module, self.last_changed):
                continue
            brevid = self.latest(module, self.last_changed)
            if not brevid:
                self.ui.note(_('ignoring empty branch %s\n') % branch)
                continue
            self.ui.note(_('found branch %s at %d\n') %
                         (branch, self.revnum(brevid)))
            self.heads.append(brevid)

    if self.startrev and self.heads:
        if len(self.heads) > 1:
            raise util.Abort(_('svn: start revision is not supported '
                               'with more than one branch'))
        revnum = self.revnum(self.heads[0])
        if revnum < self.startrev:
            raise util.Abort(
                _('svn: no revision found after start revision %d')
                % self.startrev)

    return self.heads
395
395
def getchanges(self, rev):
    """Return ([(file, rev)], {copy: source}) for *rev*.

    Consumes the per-revision path cache entry for *rev*; the caller is
    expected to cache the result itself.
    """
    if self._changescache and self._changescache[0] == rev:
        return self._changescache[1]
    self._changescache = None
    (paths, parents) = self.paths[rev]
    if parents:
        files, self.removed, copies = self.expandpaths(rev, paths, parents)
    else:
        # Perform a full checkout on roots
        uuid, module, revnum = revsplit(rev)
        entries = svn.client.ls(self.baseurl + quote(module),
                                optrev(revnum), True, self.ctx)
        files = [n for n, e in entries.iteritems()
                 if e.kind == svn.core.svn_node_file]
        copies = {}
        self.removed = set()

    files = [(f, rev) for f in sorted(files)]

    # caller caches the result, so free it here to release memory
    del self.paths[rev]
    return (files, copies)
419
419
def getchangedfiles(self, rev, i):
    """Return the names of the files touched by *rev*, caching the full
    change information for a later getchanges() call."""
    changes = self.getchanges(rev)
    self._changescache = (rev, changes)
    files, copies = changes
    return [name for name, frev in files]
424
424
def getcommit(self, rev):
    """Return the commit object for *rev*, fetching svn history on demand."""
    if rev not in self.commits:
        uuid, module, revnum = revsplit(rev)
        self.module = module
        self.reparent(module)
        # We assume that:
        # - requests for revisions after "stop" come from the
        #   revision graph backward traversal. Cache all of them
        #   down to stop, they will be used eventually.
        # - requests for revisions before "stop" come to get
        #   isolated branches parents. Just fetch what is needed.
        stop = min(self.lastrevs.get(module, 0), revnum + 1)
        self._fetch_revisions(revnum, stop)
        if rev not in self.commits:
            raise util.Abort(_('svn: revision %s not found') % revnum)
    # caller caches the result, so free it here to release memory
    return self.commits.pop(rev)
446
446
447 def gettags(self):
447 def gettags(self):
448 tags = {}
448 tags = {}
449 if self.tags is None:
449 if self.tags is None:
450 return tags
450 return tags
451
451
452 # svn tags are just a convention, project branches left in a
452 # svn tags are just a convention, project branches left in a
453 # 'tags' directory. There is no other relationship than
453 # 'tags' directory. There is no other relationship than
454 # ancestry, which is expensive to discover and makes them hard
454 # ancestry, which is expensive to discover and makes them hard
455 # to update incrementally. Worse, past revisions may be
455 # to update incrementally. Worse, past revisions may be
456 # referenced by tags far away in the future, requiring a deep
456 # referenced by tags far away in the future, requiring a deep
457 # history traversal on every calculation. Current code
457 # history traversal on every calculation. Current code
458 # performs a single backward traversal, tracking moves within
458 # performs a single backward traversal, tracking moves within
459 # the tags directory (tag renaming) and recording a new tag
459 # the tags directory (tag renaming) and recording a new tag
460 # everytime a project is copied from outside the tags
460 # everytime a project is copied from outside the tags
461 # directory. It also lists deleted tags, this behaviour may
461 # directory. It also lists deleted tags, this behaviour may
462 # change in the future.
462 # change in the future.
463 pendings = []
463 pendings = []
464 tagspath = self.tags
464 tagspath = self.tags
465 start = svn.ra.get_latest_revnum(self.ra)
465 start = svn.ra.get_latest_revnum(self.ra)
466 stream = self._getlog([self.tags], start, self.startrev)
466 stream = self._getlog([self.tags], start, self.startrev)
467 try:
467 try:
468 for entry in stream:
468 for entry in stream:
469 origpaths, revnum, author, date, message = entry
469 origpaths, revnum, author, date, message = entry
470 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
470 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
471 in origpaths.iteritems() if e.copyfrom_path]
471 in origpaths.iteritems() if e.copyfrom_path]
472 # Apply moves/copies from more specific to general
472 # Apply moves/copies from more specific to general
473 copies.sort(reverse=True)
473 copies.sort(reverse=True)
474
474
475 srctagspath = tagspath
475 srctagspath = tagspath
476 if copies and copies[-1][2] == tagspath:
476 if copies and copies[-1][2] == tagspath:
477 # Track tags directory moves
477 # Track tags directory moves
478 srctagspath = copies.pop()[0]
478 srctagspath = copies.pop()[0]
479
479
480 for source, sourcerev, dest in copies:
480 for source, sourcerev, dest in copies:
481 if not dest.startswith(tagspath + '/'):
481 if not dest.startswith(tagspath + '/'):
482 continue
482 continue
483 for tag in pendings:
483 for tag in pendings:
484 if tag[0].startswith(dest):
484 if tag[0].startswith(dest):
485 tagpath = source + tag[0][len(dest):]
485 tagpath = source + tag[0][len(dest):]
486 tag[:2] = [tagpath, sourcerev]
486 tag[:2] = [tagpath, sourcerev]
487 break
487 break
488 else:
488 else:
489 pendings.append([source, sourcerev, dest])
489 pendings.append([source, sourcerev, dest])
490
490
491 # Filter out tags with children coming from different
491 # Filter out tags with children coming from different
492 # parts of the repository like:
492 # parts of the repository like:
493 # /tags/tag.1 (from /trunk:10)
493 # /tags/tag.1 (from /trunk:10)
494 # /tags/tag.1/foo (from /branches/foo:12)
494 # /tags/tag.1/foo (from /branches/foo:12)
495 # Here/tags/tag.1 discarded as well as its children.
495 # Here/tags/tag.1 discarded as well as its children.
496 # It happens with tools like cvs2svn. Such tags cannot
496 # It happens with tools like cvs2svn. Such tags cannot
497 # be represented in mercurial.
497 # be represented in mercurial.
498 addeds = dict((p, e.copyfrom_path) for p, e
498 addeds = dict((p, e.copyfrom_path) for p, e
499 in origpaths.iteritems()
499 in origpaths.iteritems()
500 if e.action == 'A' and e.copyfrom_path)
500 if e.action == 'A' and e.copyfrom_path)
501 badroots = set()
501 badroots = set()
502 for destroot in addeds:
502 for destroot in addeds:
503 for source, sourcerev, dest in pendings:
503 for source, sourcerev, dest in pendings:
504 if (not dest.startswith(destroot + '/')
504 if (not dest.startswith(destroot + '/')
505 or source.startswith(addeds[destroot] + '/')):
505 or source.startswith(addeds[destroot] + '/')):
506 continue
506 continue
507 badroots.add(destroot)
507 badroots.add(destroot)
508 break
508 break
509
509
510 for badroot in badroots:
510 for badroot in badroots:
511 pendings = [p for p in pendings if p[2] != badroot
511 pendings = [p for p in pendings if p[2] != badroot
512 and not p[2].startswith(badroot + '/')]
512 and not p[2].startswith(badroot + '/')]
513
513
514 # Tell tag renamings from tag creations
514 # Tell tag renamings from tag creations
515 renamings = []
515 renamings = []
516 for source, sourcerev, dest in pendings:
516 for source, sourcerev, dest in pendings:
517 tagname = dest.split('/')[-1]
517 tagname = dest.split('/')[-1]
518 if source.startswith(srctagspath):
518 if source.startswith(srctagspath):
519 renamings.append([source, sourcerev, tagname])
519 renamings.append([source, sourcerev, tagname])
520 continue
520 continue
521 if tagname in tags:
521 if tagname in tags:
522 # Keep the latest tag value
522 # Keep the latest tag value
523 continue
523 continue
524 # From revision may be fake, get one with changes
524 # From revision may be fake, get one with changes
525 try:
525 try:
526 tagid = self.latest(source, sourcerev)
526 tagid = self.latest(source, sourcerev)
527 if tagid and tagname not in tags:
527 if tagid and tagname not in tags:
528 tags[tagname] = tagid
528 tags[tagname] = tagid
529 except SvnPathNotFound:
529 except SvnPathNotFound:
530 # It happens when we are following directories
530 # It happens when we are following directories
531 # we assumed were copied with their parents
531 # we assumed were copied with their parents
532 # but were really created in the tag
532 # but were really created in the tag
533 # directory.
533 # directory.
534 pass
534 pass
535 pendings = renamings
535 pendings = renamings
536 tagspath = srctagspath
536 tagspath = srctagspath
537 finally:
537 finally:
538 stream.close()
538 stream.close()
539 return tags
539 return tags
540
540
541 def converted(self, rev, destrev):
541 def converted(self, rev, destrev):
542 if not self.wc:
542 if not self.wc:
543 return
543 return
544 if self.convertfp is None:
544 if self.convertfp is None:
545 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
545 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
546 'a')
546 'a')
547 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
547 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
548 self.convertfp.flush()
548 self.convertfp.flush()
549
549
550 def revid(self, revnum, module=None):
550 def revid(self, revnum, module=None):
551 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
551 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
552
552
553 def revnum(self, rev):
553 def revnum(self, rev):
554 return int(rev.split('@')[-1])
554 return int(rev.split('@')[-1])
555
555
    def latest(self, path, stop=None):
        """Find the latest revid affecting path, up to stop revision
        number. If stop is None, default to repository latest
        revision. It may return a revision in a different module,
        since a branch may be moved without a change being
        reported. Return None if computed module does not belong to
        rootmodule subtree.
        """
        # Helper: walk the log of `path` backwards from `start` down to
        # `stop` (or the first changed revision when stop is None),
        # following copy/rename records so `path` tracks the branch as
        # it moves.  Returns (revnum, path); revnum is None when the
        # scanned range contained no changes at all.
        def findchanges(path, start, stop=None):
            stream = self._getlog([path], start, stop or 1)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if stop is None and paths:
                        # We do not know the latest changed revision,
                        # keep the first one with changed paths.
                        break
                    if revnum <= stop:
                        break

                    for p in paths:
                        if (not path.startswith(p) or
                            not paths[p].copyfrom_path):
                            continue
                        # A prefix of our path was copied here: rewrite
                        # path to its pre-copy location and keep scanning.
                        newpath = paths[p].copyfrom_path + path[len(p):]
                        self.ui.debug("branch renamed from %s to %s at %d\n" %
                                      (path, newpath, revnum))
                        path = newpath
                        break
                if not paths:
                    revnum = None
                return revnum, path
            finally:
                stream.close()

        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if stop is None:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # stat() must be issued relative to the repository root.
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        revnum, realpath = findchanges(path, stop, dirent.created_rev)
        if revnum is None:
            # Tools like svnsync can create empty revision, when
            # synchronizing only a subtree for instance. These empty
            # revisions created_rev still have their original values
            # despite all changes having disappeared and can be
            # returned by ra.stat(), at least when stating the root
            # module. In that case, do not trust created_rev and scan
            # the whole history.
            revnum, realpath = findchanges(path, stop)
            if revnum is None:
                self.ui.debug('ignoring empty branch %r\n' % realpath)
                return None

        if not realpath.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % realpath)
            return None
        return self.revid(revnum, realpath)
629
629
630 def reparent(self, module):
630 def reparent(self, module):
631 """Reparent the svn transport and return the previous parent."""
631 """Reparent the svn transport and return the previous parent."""
632 if self.prevmodule == module:
632 if self.prevmodule == module:
633 return module
633 return module
634 svnurl = self.baseurl + quote(module)
634 svnurl = self.baseurl + quote(module)
635 prevmodule = self.prevmodule
635 prevmodule = self.prevmodule
636 if prevmodule is None:
636 if prevmodule is None:
637 prevmodule = ''
637 prevmodule = ''
638 self.ui.debug("reparent to %s\n" % svnurl)
638 self.ui.debug("reparent to %s\n" % svnurl)
639 svn.ra.reparent(self.ra, svnurl)
639 svn.ra.reparent(self.ra, svnurl)
640 self.prevmodule = module
640 self.prevmodule = module
641 return prevmodule
641 return prevmodule
642
642
    def expandpaths(self, rev, paths, parents):
        """Expand the changed-path entries of revision rev into the
        (changedfiles, removedset, copymap) triple the converter needs.

        paths is a sequence of (path, changed-path-entry) pairs from the
        svn log; parents holds the already-converted parent revids (the
        first one is used to resolve copy sources).  Directory entries
        are expanded into their individual files.
        """
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            # The branch moved: retarget the transport before any
            # path lookups against this revision.
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                # Look the path up in the parent revision to find out
                # what kind of node was deleted.
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A whole directory disappeared: mark every file it
                    # contained in the parent revision as removed.
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                # Every file below the added/changed directory is changed.
                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                # Record a per-file copy for each file below the copied
                # directory.
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        changed.update(removed)
        return (list(changed), removed, copies)
743
743
    def _fetch_revisions(self, from_revnum, to_revnum):
        # Walk the svn log of self.module backwards from from_revnum to
        # to_revnum (inclusive), filling self.commits and self.paths
        # with one commit object per relevant revision and linking each
        # commit to its child via the shared parents list.
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                # The longest matching prefix wins (orig_paths is sorted).
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                branch = self.module.split("/")[-1]
                if branch == self.trunkname:
                    branch = None
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                # We walk backwards, so the previously-parsed (child)
                # revision gets this one as its parent.
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort(_('svn: branch has no revision %s')
                                 % to_revnum)
            raise
869
869
    def getfile(self, file, rev):
        """Return (data, mode) for file at revision rev.

        mode is 'x' for executable files, 'l' for symlinks (with the
        svn 'link ' payload prefix stripped from data) and '' otherwise.
        Raises IOError when the file does not exist at that revision.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            raise IOError
        mode = ''
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = StringIO()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_files() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitly.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            # Derive the file mode from the svn properties.
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError
            raise
        if mode == 'l':
            # svn stores symlink targets as "link TARGET".
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
901
901
902 def _iterfiles(self, path, revnum):
902 def _iterfiles(self, path, revnum):
903 """Enumerate all files in path at revnum, recursively."""
903 """Enumerate all files in path at revnum, recursively."""
904 path = path.strip('/')
904 path = path.strip('/')
905 pool = Pool()
905 pool = Pool()
906 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
906 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
907 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
907 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
908 if path:
908 if path:
909 path += '/'
909 path += '/'
910 return ((path + p) for p, e in entries.iteritems()
910 return ((path + p) for p, e in entries.iteritems()
911 if e.kind == svn.core.svn_node_file)
911 if e.kind == svn.core.svn_node_file)
912
912
913 def getrelpath(self, path, module=None):
913 def getrelpath(self, path, module=None):
914 if module is None:
914 if module is None:
915 module = self.module
915 module = self.module
916 # Given the repository url of this wc, say
916 # Given the repository url of this wc, say
917 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
917 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
918 # extract the "entry" portion (a relative path) from what
918 # extract the "entry" portion (a relative path) from what
919 # svn log --xml says, ie
919 # svn log --xml says, ie
920 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
920 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
921 # that is to say "tests/PloneTestCase.py"
921 # that is to say "tests/PloneTestCase.py"
922 if path.startswith(module):
922 if path.startswith(module):
923 relative = path.rstrip('/')[len(module):]
923 relative = path.rstrip('/')[len(module):]
924 if relative.startswith('/'):
924 if relative.startswith('/'):
925 return relative[1:]
925 return relative[1:]
926 elif relative == '':
926 elif relative == '':
927 return relative
927 return relative
928
928
929 # The path is outside our tracked tree...
929 # The path is outside our tracked tree...
930 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
930 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
931 return None
931 return None
932
932
933 def _checkpath(self, path, revnum, module=None):
933 def _checkpath(self, path, revnum, module=None):
934 if module is not None:
934 if module is not None:
935 prevmodule = self.reparent('')
935 prevmodule = self.reparent('')
936 path = module + '/' + path
936 path = module + '/' + path
937 try:
937 try:
938 # ra.check_path does not like leading slashes very much, it leads
938 # ra.check_path does not like leading slashes very much, it leads
939 # to PROPFIND subversion errors
939 # to PROPFIND subversion errors
940 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
940 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
941 finally:
941 finally:
942 if module is not None:
942 if module is not None:
943 self.reparent(prevmodule)
943 self.reparent(prevmodule)
944
944
945 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
945 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
946 strict_node_history=False):
946 strict_node_history=False):
947 # Normalize path names, svn >= 1.5 only wants paths relative to
947 # Normalize path names, svn >= 1.5 only wants paths relative to
948 # supplied URL
948 # supplied URL
949 relpaths = []
949 relpaths = []
950 for p in paths:
950 for p in paths:
951 if not p.startswith('/'):
951 if not p.startswith('/'):
952 p = self.module + '/' + p
952 p = self.module + '/' + p
953 relpaths.append(p.strip('/'))
953 relpaths.append(p.strip('/'))
954 args = [self.baseurl, relpaths, start, end, limit,
954 args = [self.baseurl, relpaths, start, end, limit,
955 discover_changed_paths, strict_node_history]
955 discover_changed_paths, strict_node_history]
956 arg = encodeargs(args)
956 arg = encodeargs(args)
957 hgexe = util.hgexecutable()
957 hgexe = util.hgexecutable()
958 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
958 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
959 stdin, stdout = util.popen2(util.quotecommand(cmd))
959 stdin, stdout = util.popen2(util.quotecommand(cmd))
960 stdin.write(arg)
960 stdin.write(arg)
961 try:
961 try:
962 stdin.close()
962 stdin.close()
963 except IOError:
963 except IOError:
964 raise util.Abort(_('Mercurial failed to run itself, check'
964 raise util.Abort(_('Mercurial failed to run itself, check'
965 ' hg executable is in PATH'))
965 ' hg executable is in PATH'))
966 return logstream(stdout)
966 return logstream(stdout)
967
967
968 pre_revprop_change = '''#!/bin/sh
968 pre_revprop_change = '''#!/bin/sh
969
969
970 REPOS="$1"
970 REPOS="$1"
971 REV="$2"
971 REV="$2"
972 USER="$3"
972 USER="$3"
973 PROPNAME="$4"
973 PROPNAME="$4"
974 ACTION="$5"
974 ACTION="$5"
975
975
976 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
976 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
977 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
977 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
978 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
978 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
979
979
980 echo "Changing prohibited revision property" >&2
980 echo "Changing prohibited revision property" >&2
981 exit 1
981 exit 1
982 '''
982 '''
983
983
984 class svn_sink(converter_sink, commandline):
984 class svn_sink(converter_sink, commandline):
985 commit_re = re.compile(r'Committed revision (\d+).', re.M)
985 commit_re = re.compile(r'Committed revision (\d+).', re.M)
986 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
986 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
987
987
988 def prerun(self):
988 def prerun(self):
989 if self.wc:
989 if self.wc:
990 os.chdir(self.wc)
990 os.chdir(self.wc)
991
991
992 def postrun(self):
992 def postrun(self):
993 if self.wc:
993 if self.wc:
994 os.chdir(self.cwd)
994 os.chdir(self.cwd)
995
995
996 def join(self, name):
996 def join(self, name):
997 return os.path.join(self.wc, '.svn', name)
997 return os.path.join(self.wc, '.svn', name)
998
998
999 def revmapfile(self):
999 def revmapfile(self):
1000 return self.join('hg-shamap')
1000 return self.join('hg-shamap')
1001
1001
1002 def authorfile(self):
1002 def authorfile(self):
1003 return self.join('hg-authormap')
1003 return self.join('hg-authormap')
1004
1004
1005 def __init__(self, ui, path):
1005 def __init__(self, ui, path):
1006
1006
1007 converter_sink.__init__(self, ui, path)
1007 converter_sink.__init__(self, ui, path)
1008 commandline.__init__(self, ui, 'svn')
1008 commandline.__init__(self, ui, 'svn')
1009 self.delete = []
1009 self.delete = []
1010 self.setexec = []
1010 self.setexec = []
1011 self.delexec = []
1011 self.delexec = []
1012 self.copies = []
1012 self.copies = []
1013 self.wc = None
1013 self.wc = None
1014 self.cwd = os.getcwd()
1014 self.cwd = os.getcwd()
1015
1015
1016 path = os.path.realpath(path)
1016 path = os.path.realpath(path)
1017
1017
1018 created = False
1018 created = False
1019 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1019 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1020 self.wc = path
1020 self.wc = path
1021 self.run0('update')
1021 self.run0('update')
1022 else:
1022 else:
1023 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1023 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1024
1024
1025 if os.path.isdir(os.path.dirname(path)):
1025 if os.path.isdir(os.path.dirname(path)):
1026 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1026 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1027 ui.status(_('initializing svn repository %r\n') %
1027 ui.status(_('initializing svn repository %r\n') %
1028 os.path.basename(path))
1028 os.path.basename(path))
1029 commandline(ui, 'svnadmin').run0('create', path)
1029 commandline(ui, 'svnadmin').run0('create', path)
1030 created = path
1030 created = path
1031 path = util.normpath(path)
1031 path = util.normpath(path)
1032 if not path.startswith('/'):
1032 if not path.startswith('/'):
1033 path = '/' + path
1033 path = '/' + path
1034 path = 'file://' + path
1034 path = 'file://' + path
1035
1035
1036 ui.status(_('initializing svn working copy %r\n')
1036 ui.status(_('initializing svn working copy %r\n')
1037 % os.path.basename(wcpath))
1037 % os.path.basename(wcpath))
1038 self.run0('checkout', path, wcpath)
1038 self.run0('checkout', path, wcpath)
1039
1039
1040 self.wc = wcpath
1040 self.wc = wcpath
1041 self.opener = scmutil.opener(self.wc)
1041 self.opener = scmutil.opener(self.wc)
1042 self.wopener = scmutil.opener(self.wc)
1042 self.wopener = scmutil.opener(self.wc)
1043 self.childmap = mapfile(ui, self.join('hg-childmap'))
1043 self.childmap = mapfile(ui, self.join('hg-childmap'))
1044 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1044 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1045
1045
1046 if created:
1046 if created:
1047 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1047 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1048 fp = open(hook, 'w')
1048 fp = open(hook, 'w')
1049 fp.write(pre_revprop_change)
1049 fp.write(pre_revprop_change)
1050 fp.close()
1050 fp.close()
1051 util.setflags(hook, False, True)
1051 util.setflags(hook, False, True)
1052
1052
1053 output = self.run0('info')
1053 output = self.run0('info')
1054 self.uuid = self.uuid_re.search(output).group(1).strip()
1054 self.uuid = self.uuid_re.search(output).group(1).strip()
1055
1055
1056 def wjoin(self, *names):
1056 def wjoin(self, *names):
1057 return os.path.join(self.wc, *names)
1057 return os.path.join(self.wc, *names)
1058
1058
1059 @propertycache
1059 @propertycache
1060 def manifest(self):
1060 def manifest(self):
1061 # As of svn 1.7, the "add" command fails when receiving
1061 # As of svn 1.7, the "add" command fails when receiving
1062 # already tracked entries, so we have to track and filter them
1062 # already tracked entries, so we have to track and filter them
1063 # ourselves.
1063 # ourselves.
1064 m = set()
1064 m = set()
1065 output = self.run0('ls', recursive=True, xml=True)
1065 output = self.run0('ls', recursive=True, xml=True)
1066 doc = xml.dom.minidom.parseString(output)
1066 doc = xml.dom.minidom.parseString(output)
1067 for e in doc.getElementsByTagName('entry'):
1067 for e in doc.getElementsByTagName('entry'):
1068 for n in e.childNodes:
1068 for n in e.childNodes:
1069 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1069 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1070 continue
1070 continue
1071 name = ''.join(c.data for c in n.childNodes
1071 name = ''.join(c.data for c in n.childNodes
1072 if c.nodeType == c.TEXT_NODE)
1072 if c.nodeType == c.TEXT_NODE)
1073 # Entries are compared with names coming from
1073 # Entries are compared with names coming from
1074 # mercurial, so bytes with undefined encoding. Our
1074 # mercurial, so bytes with undefined encoding. Our
1075 # best bet is to assume they are in local
1075 # best bet is to assume they are in local
1076 # encoding. They will be passed to command line calls
1076 # encoding. They will be passed to command line calls
1077 # later anyway, so they better be.
1077 # later anyway, so they better be.
1078 m.add(encoding.tolocal(name.encode('utf-8')))
1078 m.add(encoding.tolocal(name.encode('utf-8')))
1079 break
1079 break
1080 return m
1080 return m
1081
1081
1082 def putfile(self, filename, flags, data):
1082 def putfile(self, filename, flags, data):
1083 if 'l' in flags:
1083 if 'l' in flags:
1084 self.wopener.symlink(data, filename)
1084 self.wopener.symlink(data, filename)
1085 else:
1085 else:
1086 try:
1086 try:
1087 if os.path.islink(self.wjoin(filename)):
1087 if os.path.islink(self.wjoin(filename)):
1088 os.unlink(filename)
1088 os.unlink(filename)
1089 except OSError:
1089 except OSError:
1090 pass
1090 pass
1091 self.wopener.write(filename, data)
1091 self.wopener.write(filename, data)
1092
1092
1093 if self.is_exec:
1093 if self.is_exec:
1094 was_exec = self.is_exec(self.wjoin(filename))
1094 was_exec = self.is_exec(self.wjoin(filename))
1095 else:
1095 else:
1096 # On filesystems not supporting execute-bit, there is no way
1096 # On filesystems not supporting execute-bit, there is no way
1097 # to know if it is set but asking subversion. Setting it
1097 # to know if it is set but asking subversion. Setting it
1098 # systematically is just as expensive and much simpler.
1098 # systematically is just as expensive and much simpler.
1099 was_exec = 'x' not in flags
1099 was_exec = 'x' not in flags
1100
1100
1101 util.setflags(self.wjoin(filename), False, 'x' in flags)
1101 util.setflags(self.wjoin(filename), False, 'x' in flags)
1102 if was_exec:
1102 if was_exec:
1103 if 'x' not in flags:
1103 if 'x' not in flags:
1104 self.delexec.append(filename)
1104 self.delexec.append(filename)
1105 else:
1105 else:
1106 if 'x' in flags:
1106 if 'x' in flags:
1107 self.setexec.append(filename)
1107 self.setexec.append(filename)
1108
1108
1109 def _copyfile(self, source, dest):
1109 def _copyfile(self, source, dest):
1110 # SVN's copy command pukes if the destination file exists, but
1110 # SVN's copy command pukes if the destination file exists, but
1111 # our copyfile method expects to record a copy that has
1111 # our copyfile method expects to record a copy that has
1112 # already occurred. Cross the semantic gap.
1112 # already occurred. Cross the semantic gap.
1113 wdest = self.wjoin(dest)
1113 wdest = self.wjoin(dest)
1114 exists = os.path.lexists(wdest)
1114 exists = os.path.lexists(wdest)
1115 if exists:
1115 if exists:
1116 fd, tempname = tempfile.mkstemp(
1116 fd, tempname = tempfile.mkstemp(
1117 prefix='hg-copy-', dir=os.path.dirname(wdest))
1117 prefix='hg-copy-', dir=os.path.dirname(wdest))
1118 os.close(fd)
1118 os.close(fd)
1119 os.unlink(tempname)
1119 os.unlink(tempname)
1120 os.rename(wdest, tempname)
1120 os.rename(wdest, tempname)
1121 try:
1121 try:
1122 self.run0('copy', source, dest)
1122 self.run0('copy', source, dest)
1123 finally:
1123 finally:
1124 self.manifest.add(dest)
1124 self.manifest.add(dest)
1125 if exists:
1125 if exists:
1126 try:
1126 try:
1127 os.unlink(wdest)
1127 os.unlink(wdest)
1128 except OSError:
1128 except OSError:
1129 pass
1129 pass
1130 os.rename(tempname, wdest)
1130 os.rename(tempname, wdest)
1131
1131
1132 def dirs_of(self, files):
1132 def dirs_of(self, files):
1133 dirs = set()
1133 dirs = set()
1134 for f in files:
1134 for f in files:
1135 if os.path.isdir(self.wjoin(f)):
1135 if os.path.isdir(self.wjoin(f)):
1136 dirs.add(f)
1136 dirs.add(f)
1137 for i in strutil.rfindall(f, '/'):
1137 for i in strutil.rfindall(f, '/'):
1138 dirs.add(f[:i])
1138 dirs.add(f[:i])
1139 return dirs
1139 return dirs
1140
1140
1141 def add_dirs(self, files):
1141 def add_dirs(self, files):
1142 add_dirs = [d for d in sorted(self.dirs_of(files))
1142 add_dirs = [d for d in sorted(self.dirs_of(files))
1143 if d not in self.manifest]
1143 if d not in self.manifest]
1144 if add_dirs:
1144 if add_dirs:
1145 self.manifest.update(add_dirs)
1145 self.manifest.update(add_dirs)
1146 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1146 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1147 return add_dirs
1147 return add_dirs
1148
1148
1149 def add_files(self, files):
1149 def add_files(self, files):
1150 files = [f for f in files if f not in self.manifest]
1150 files = [f for f in files if f not in self.manifest]
1151 if files:
1151 if files:
1152 self.manifest.update(files)
1152 self.manifest.update(files)
1153 self.xargs(files, 'add', quiet=True)
1153 self.xargs(files, 'add', quiet=True)
1154 return files
1154 return files
1155
1155
1156 def tidy_dirs(self, names):
1156 def tidy_dirs(self, names):
1157 deleted = []
1157 deleted = []
1158 for d in sorted(self.dirs_of(names), reverse=True):
1158 for d in sorted(self.dirs_of(names), reverse=True):
1159 wd = self.wjoin(d)
1159 wd = self.wjoin(d)
1160 if os.listdir(wd) == '.svn':
1160 if os.listdir(wd) == '.svn':
1161 self.run0('delete', d)
1161 self.run0('delete', d)
1162 self.manifest.remove(d)
1162 self.manifest.remove(d)
1163 deleted.append(d)
1163 deleted.append(d)
1164 return deleted
1164 return deleted
1165
1165
1166 def addchild(self, parent, child):
1166 def addchild(self, parent, child):
1167 self.childmap[parent] = child
1167 self.childmap[parent] = child
1168
1168
1169 def revid(self, rev):
1169 def revid(self, rev):
1170 return u"svn:%s@%s" % (self.uuid, rev)
1170 return u"svn:%s@%s" % (self.uuid, rev)
1171
1171
1172 def putcommit(self, files, copies, parents, commit, source, revmap):
1172 def putcommit(self, files, copies, parents, commit, source, revmap):
1173 for parent in parents:
1173 for parent in parents:
1174 try:
1174 try:
1175 return self.revid(self.childmap[parent])
1175 return self.revid(self.childmap[parent])
1176 except KeyError:
1176 except KeyError:
1177 pass
1177 pass
1178
1178
1179 # Apply changes to working copy
1179 # Apply changes to working copy
1180 for f, v in files:
1180 for f, v in files:
1181 try:
1181 try:
1182 data, mode = source.getfile(f, v)
1182 data, mode = source.getfile(f, v)
1183 except IOError:
1183 except IOError:
1184 self.delete.append(f)
1184 self.delete.append(f)
1185 else:
1185 else:
1186 self.putfile(f, mode, data)
1186 self.putfile(f, mode, data)
1187 if f in copies:
1187 if f in copies:
1188 self.copies.append([copies[f], f])
1188 self.copies.append([copies[f], f])
1189 files = [f[0] for f in files]
1189 files = [f[0] for f in files]
1190
1190
1191 entries = set(self.delete)
1191 entries = set(self.delete)
1192 files = frozenset(files)
1192 files = frozenset(files)
1193 entries.update(self.add_dirs(files.difference(entries)))
1193 entries.update(self.add_dirs(files.difference(entries)))
1194 if self.copies:
1194 if self.copies:
1195 for s, d in self.copies:
1195 for s, d in self.copies:
1196 self._copyfile(s, d)
1196 self._copyfile(s, d)
1197 self.copies = []
1197 self.copies = []
1198 if self.delete:
1198 if self.delete:
1199 self.xargs(self.delete, 'delete')
1199 self.xargs(self.delete, 'delete')
1200 for f in self.delete:
1200 for f in self.delete:
1201 self.manifest.remove(f)
1201 self.manifest.remove(f)
1202 self.delete = []
1202 self.delete = []
1203 entries.update(self.add_files(files.difference(entries)))
1203 entries.update(self.add_files(files.difference(entries)))
1204 entries.update(self.tidy_dirs(entries))
1204 entries.update(self.tidy_dirs(entries))
1205 if self.delexec:
1205 if self.delexec:
1206 self.xargs(self.delexec, 'propdel', 'svn:executable')
1206 self.xargs(self.delexec, 'propdel', 'svn:executable')
1207 self.delexec = []
1207 self.delexec = []
1208 if self.setexec:
1208 if self.setexec:
1209 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1209 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1210 self.setexec = []
1210 self.setexec = []
1211
1211
1212 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1212 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1213 fp = os.fdopen(fd, 'w')
1213 fp = os.fdopen(fd, 'w')
1214 fp.write(commit.desc)
1214 fp.write(commit.desc)
1215 fp.close()
1215 fp.close()
1216 try:
1216 try:
1217 output = self.run0('commit',
1217 output = self.run0('commit',
1218 username=util.shortuser(commit.author),
1218 username=util.shortuser(commit.author),
1219 file=messagefile,
1219 file=messagefile,
1220 encoding='utf-8')
1220 encoding='utf-8')
1221 try:
1221 try:
1222 rev = self.commit_re.search(output).group(1)
1222 rev = self.commit_re.search(output).group(1)
1223 except AttributeError:
1223 except AttributeError:
1224 if not files:
1224 if not files:
1225 return parents[0]
1225 return parents[0]
1226 self.ui.warn(_('unexpected svn output:\n'))
1226 self.ui.warn(_('unexpected svn output:\n'))
1227 self.ui.warn(output)
1227 self.ui.warn(output)
1228 raise util.Abort(_('unable to cope with svn output'))
1228 raise util.Abort(_('unable to cope with svn output'))
1229 if commit.rev:
1229 if commit.rev:
1230 self.run('propset', 'hg:convert-rev', commit.rev,
1230 self.run('propset', 'hg:convert-rev', commit.rev,
1231 revprop=True, revision=rev)
1231 revprop=True, revision=rev)
1232 if commit.branch and commit.branch != 'default':
1232 if commit.branch and commit.branch != 'default':
1233 self.run('propset', 'hg:convert-branch', commit.branch,
1233 self.run('propset', 'hg:convert-branch', commit.branch,
1234 revprop=True, revision=rev)
1234 revprop=True, revision=rev)
1235 for parent in parents:
1235 for parent in parents:
1236 self.addchild(parent, rev)
1236 self.addchild(parent, rev)
1237 return self.revid(rev)
1237 return self.revid(rev)
1238 finally:
1238 finally:
1239 os.unlink(messagefile)
1239 os.unlink(messagefile)
1240
1240
1241 def puttags(self, tags):
1241 def puttags(self, tags):
1242 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1242 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1243 return None, None
1243 return None, None
1244
1244
1245 def hascommit(self, rev):
1245 def hascommit(self, rev):
1246 # This is not correct as one can convert to an existing subversion
1246 # This is not correct as one can convert to an existing subversion
1247 # repository and childmap would not list all revisions. Too bad.
1247 # repository and childmap would not list all revisions. Too bad.
1248 if rev in self.childmap:
1248 if rev in self.childmap:
1249 return True
1249 return True
1250 raise util.Abort(_('splice map revision %s not found in subversion '
1250 raise util.Abort(_('splice map revision %s not found in subversion '
1251 'child map (revision lookups are not implemented)')
1251 'child map (revision lookups are not implemented)')
1252 % rev)
1252 % rev)
@@ -1,289 +1,289 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands, match, cmdutil
9 from mercurial import util, commands, match, cmdutil
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 cmdtable = {}
13 cmdtable = {}
14 command = cmdutil.command(cmdtable)
14 command = cmdutil.command(cmdtable)
15
15
16 class gpg(object):
16 class gpg(object):
17 def __init__(self, path, key=None):
17 def __init__(self, path, key=None):
18 self.path = path
18 self.path = path
19 self.key = (key and " --local-user \"%s\"" % key) or ""
19 self.key = (key and " --local-user \"%s\"" % key) or ""
20
20
21 def sign(self, data):
21 def sign(self, data):
22 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
22 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
23 return util.filter(data, gpgcmd)
23 return util.filter(data, gpgcmd)
24
24
25 def verify(self, data, sig):
25 def verify(self, data, sig):
26 """ returns of the good and bad signatures"""
26 """ returns of the good and bad signatures"""
27 sigfile = datafile = None
27 sigfile = datafile = None
28 try:
28 try:
29 # create temporary files
29 # create temporary files
30 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
30 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
31 fp = os.fdopen(fd, 'wb')
31 fp = os.fdopen(fd, 'wb')
32 fp.write(sig)
32 fp.write(sig)
33 fp.close()
33 fp.close()
34 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
34 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
35 fp = os.fdopen(fd, 'wb')
35 fp = os.fdopen(fd, 'wb')
36 fp.write(data)
36 fp.write(data)
37 fp.close()
37 fp.close()
38 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
38 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
39 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
39 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
40 ret = util.filter("", gpgcmd)
40 ret = util.filter("", gpgcmd)
41 finally:
41 finally:
42 for f in (sigfile, datafile):
42 for f in (sigfile, datafile):
43 try:
43 try:
44 if f:
44 if f:
45 os.unlink(f)
45 os.unlink(f)
46 except:
46 except OSError:
47 pass
47 pass
48 keys = []
48 keys = []
49 key, fingerprint = None, None
49 key, fingerprint = None, None
50 err = ""
50 err = ""
51 for l in ret.splitlines():
51 for l in ret.splitlines():
52 # see DETAILS in the gnupg documentation
52 # see DETAILS in the gnupg documentation
53 # filter the logger output
53 # filter the logger output
54 if not l.startswith("[GNUPG:]"):
54 if not l.startswith("[GNUPG:]"):
55 continue
55 continue
56 l = l[9:]
56 l = l[9:]
57 if l.startswith("ERRSIG"):
57 if l.startswith("ERRSIG"):
58 err = _("error while verifying signature")
58 err = _("error while verifying signature")
59 break
59 break
60 elif l.startswith("VALIDSIG"):
60 elif l.startswith("VALIDSIG"):
61 # fingerprint of the primary key
61 # fingerprint of the primary key
62 fingerprint = l.split()[10]
62 fingerprint = l.split()[10]
63 elif (l.startswith("GOODSIG") or
63 elif (l.startswith("GOODSIG") or
64 l.startswith("EXPSIG") or
64 l.startswith("EXPSIG") or
65 l.startswith("EXPKEYSIG") or
65 l.startswith("EXPKEYSIG") or
66 l.startswith("BADSIG")):
66 l.startswith("BADSIG")):
67 if key is not None:
67 if key is not None:
68 keys.append(key + [fingerprint])
68 keys.append(key + [fingerprint])
69 key = l.split(" ", 2)
69 key = l.split(" ", 2)
70 fingerprint = None
70 fingerprint = None
71 if err:
71 if err:
72 return err, []
72 return err, []
73 if key is not None:
73 if key is not None:
74 keys.append(key + [fingerprint])
74 keys.append(key + [fingerprint])
75 return err, keys
75 return err, keys
76
76
77 def newgpg(ui, **opts):
77 def newgpg(ui, **opts):
78 """create a new gpg instance"""
78 """create a new gpg instance"""
79 gpgpath = ui.config("gpg", "cmd", "gpg")
79 gpgpath = ui.config("gpg", "cmd", "gpg")
80 gpgkey = opts.get('key')
80 gpgkey = opts.get('key')
81 if not gpgkey:
81 if not gpgkey:
82 gpgkey = ui.config("gpg", "key", None)
82 gpgkey = ui.config("gpg", "key", None)
83 return gpg(gpgpath, gpgkey)
83 return gpg(gpgpath, gpgkey)
84
84
85 def sigwalk(repo):
85 def sigwalk(repo):
86 """
86 """
87 walk over every sigs, yields a couple
87 walk over every sigs, yields a couple
88 ((node, version, sig), (filename, linenumber))
88 ((node, version, sig), (filename, linenumber))
89 """
89 """
90 def parsefile(fileiter, context):
90 def parsefile(fileiter, context):
91 ln = 1
91 ln = 1
92 for l in fileiter:
92 for l in fileiter:
93 if not l:
93 if not l:
94 continue
94 continue
95 yield (l.split(" ", 2), (context, ln))
95 yield (l.split(" ", 2), (context, ln))
96 ln += 1
96 ln += 1
97
97
98 # read the heads
98 # read the heads
99 fl = repo.file(".hgsigs")
99 fl = repo.file(".hgsigs")
100 for r in reversed(fl.heads()):
100 for r in reversed(fl.heads()):
101 fn = ".hgsigs|%s" % hgnode.short(r)
101 fn = ".hgsigs|%s" % hgnode.short(r)
102 for item in parsefile(fl.read(r).splitlines(), fn):
102 for item in parsefile(fl.read(r).splitlines(), fn):
103 yield item
103 yield item
104 try:
104 try:
105 # read local signatures
105 # read local signatures
106 fn = "localsigs"
106 fn = "localsigs"
107 for item in parsefile(repo.opener(fn), fn):
107 for item in parsefile(repo.opener(fn), fn):
108 yield item
108 yield item
109 except IOError:
109 except IOError:
110 pass
110 pass
111
111
112 def getkeys(ui, repo, mygpg, sigdata, context):
112 def getkeys(ui, repo, mygpg, sigdata, context):
113 """get the keys who signed a data"""
113 """get the keys who signed a data"""
114 fn, ln = context
114 fn, ln = context
115 node, version, sig = sigdata
115 node, version, sig = sigdata
116 prefix = "%s:%d" % (fn, ln)
116 prefix = "%s:%d" % (fn, ln)
117 node = hgnode.bin(node)
117 node = hgnode.bin(node)
118
118
119 data = node2txt(repo, node, version)
119 data = node2txt(repo, node, version)
120 sig = binascii.a2b_base64(sig)
120 sig = binascii.a2b_base64(sig)
121 err, keys = mygpg.verify(data, sig)
121 err, keys = mygpg.verify(data, sig)
122 if err:
122 if err:
123 ui.warn("%s:%d %s\n" % (fn, ln , err))
123 ui.warn("%s:%d %s\n" % (fn, ln , err))
124 return None
124 return None
125
125
126 validkeys = []
126 validkeys = []
127 # warn for expired key and/or sigs
127 # warn for expired key and/or sigs
128 for key in keys:
128 for key in keys:
129 if key[0] == "BADSIG":
129 if key[0] == "BADSIG":
130 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
130 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
131 continue
131 continue
132 if key[0] == "EXPSIG":
132 if key[0] == "EXPSIG":
133 ui.write(_("%s Note: Signature has expired"
133 ui.write(_("%s Note: Signature has expired"
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 elif key[0] == "EXPKEYSIG":
135 elif key[0] == "EXPKEYSIG":
136 ui.write(_("%s Note: This key has expired"
136 ui.write(_("%s Note: This key has expired"
137 " (signed by: \"%s\")\n") % (prefix, key[2]))
137 " (signed by: \"%s\")\n") % (prefix, key[2]))
138 validkeys.append((key[1], key[2], key[3]))
138 validkeys.append((key[1], key[2], key[3]))
139 return validkeys
139 return validkeys
140
140
141 @command("sigs", [], _('hg sigs'))
141 @command("sigs", [], _('hg sigs'))
142 def sigs(ui, repo):
142 def sigs(ui, repo):
143 """list signed changesets"""
143 """list signed changesets"""
144 mygpg = newgpg(ui)
144 mygpg = newgpg(ui)
145 revs = {}
145 revs = {}
146
146
147 for data, context in sigwalk(repo):
147 for data, context in sigwalk(repo):
148 node, version, sig = data
148 node, version, sig = data
149 fn, ln = context
149 fn, ln = context
150 try:
150 try:
151 n = repo.lookup(node)
151 n = repo.lookup(node)
152 except KeyError:
152 except KeyError:
153 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
153 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
154 continue
154 continue
155 r = repo.changelog.rev(n)
155 r = repo.changelog.rev(n)
156 keys = getkeys(ui, repo, mygpg, data, context)
156 keys = getkeys(ui, repo, mygpg, data, context)
157 if not keys:
157 if not keys:
158 continue
158 continue
159 revs.setdefault(r, [])
159 revs.setdefault(r, [])
160 revs[r].extend(keys)
160 revs[r].extend(keys)
161 for rev in sorted(revs, reverse=True):
161 for rev in sorted(revs, reverse=True):
162 for k in revs[rev]:
162 for k in revs[rev]:
163 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
163 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
164 ui.write("%-30s %s\n" % (keystr(ui, k), r))
164 ui.write("%-30s %s\n" % (keystr(ui, k), r))
165
165
166 @command("sigcheck", [], _('hg sigcheck REVISION'))
166 @command("sigcheck", [], _('hg sigcheck REVISION'))
167 def check(ui, repo, rev):
167 def check(ui, repo, rev):
168 """verify all the signatures there may be for a particular revision"""
168 """verify all the signatures there may be for a particular revision"""
169 mygpg = newgpg(ui)
169 mygpg = newgpg(ui)
170 rev = repo.lookup(rev)
170 rev = repo.lookup(rev)
171 hexrev = hgnode.hex(rev)
171 hexrev = hgnode.hex(rev)
172 keys = []
172 keys = []
173
173
174 for data, context in sigwalk(repo):
174 for data, context in sigwalk(repo):
175 node, version, sig = data
175 node, version, sig = data
176 if node == hexrev:
176 if node == hexrev:
177 k = getkeys(ui, repo, mygpg, data, context)
177 k = getkeys(ui, repo, mygpg, data, context)
178 if k:
178 if k:
179 keys.extend(k)
179 keys.extend(k)
180
180
181 if not keys:
181 if not keys:
182 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
182 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
183 return
183 return
184
184
185 # print summary
185 # print summary
186 ui.write("%s is signed by:\n" % hgnode.short(rev))
186 ui.write("%s is signed by:\n" % hgnode.short(rev))
187 for key in keys:
187 for key in keys:
188 ui.write(" %s\n" % keystr(ui, key))
188 ui.write(" %s\n" % keystr(ui, key))
189
189
190 def keystr(ui, key):
190 def keystr(ui, key):
191 """associate a string to a key (username, comment)"""
191 """associate a string to a key (username, comment)"""
192 keyid, user, fingerprint = key
192 keyid, user, fingerprint = key
193 comment = ui.config("gpg", fingerprint, None)
193 comment = ui.config("gpg", fingerprint, None)
194 if comment:
194 if comment:
195 return "%s (%s)" % (user, comment)
195 return "%s (%s)" % (user, comment)
196 else:
196 else:
197 return user
197 return user
198
198
199 @command("sign",
199 @command("sign",
200 [('l', 'local', None, _('make the signature local')),
200 [('l', 'local', None, _('make the signature local')),
201 ('f', 'force', None, _('sign even if the sigfile is modified')),
201 ('f', 'force', None, _('sign even if the sigfile is modified')),
202 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
202 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
203 ('k', 'key', '',
203 ('k', 'key', '',
204 _('the key id to sign with'), _('ID')),
204 _('the key id to sign with'), _('ID')),
205 ('m', 'message', '',
205 ('m', 'message', '',
206 _('commit message'), _('TEXT')),
206 _('commit message'), _('TEXT')),
207 ] + commands.commitopts2,
207 ] + commands.commitopts2,
208 _('hg sign [OPTION]... [REVISION]...'))
208 _('hg sign [OPTION]... [REVISION]...'))
209 def sign(ui, repo, *revs, **opts):
209 def sign(ui, repo, *revs, **opts):
210 """add a signature for the current or given revision
210 """add a signature for the current or given revision
211
211
212 If no revision is given, the parent of the working directory is used,
212 If no revision is given, the parent of the working directory is used,
213 or tip if no revision is checked out.
213 or tip if no revision is checked out.
214
214
215 See :hg:`help dates` for a list of formats valid for -d/--date.
215 See :hg:`help dates` for a list of formats valid for -d/--date.
216 """
216 """
217
217
218 mygpg = newgpg(ui, **opts)
218 mygpg = newgpg(ui, **opts)
219 sigver = "0"
219 sigver = "0"
220 sigmessage = ""
220 sigmessage = ""
221
221
222 date = opts.get('date')
222 date = opts.get('date')
223 if date:
223 if date:
224 opts['date'] = util.parsedate(date)
224 opts['date'] = util.parsedate(date)
225
225
226 if revs:
226 if revs:
227 nodes = [repo.lookup(n) for n in revs]
227 nodes = [repo.lookup(n) for n in revs]
228 else:
228 else:
229 nodes = [node for node in repo.dirstate.parents()
229 nodes = [node for node in repo.dirstate.parents()
230 if node != hgnode.nullid]
230 if node != hgnode.nullid]
231 if len(nodes) > 1:
231 if len(nodes) > 1:
232 raise util.Abort(_('uncommitted merge - please provide a '
232 raise util.Abort(_('uncommitted merge - please provide a '
233 'specific revision'))
233 'specific revision'))
234 if not nodes:
234 if not nodes:
235 nodes = [repo.changelog.tip()]
235 nodes = [repo.changelog.tip()]
236
236
237 for n in nodes:
237 for n in nodes:
238 hexnode = hgnode.hex(n)
238 hexnode = hgnode.hex(n)
239 ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n),
239 ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n),
240 hgnode.short(n)))
240 hgnode.short(n)))
241 # build data
241 # build data
242 data = node2txt(repo, n, sigver)
242 data = node2txt(repo, n, sigver)
243 sig = mygpg.sign(data)
243 sig = mygpg.sign(data)
244 if not sig:
244 if not sig:
245 raise util.Abort(_("error while signing"))
245 raise util.Abort(_("error while signing"))
246 sig = binascii.b2a_base64(sig)
246 sig = binascii.b2a_base64(sig)
247 sig = sig.replace("\n", "")
247 sig = sig.replace("\n", "")
248 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
248 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
249
249
250 # write it
250 # write it
251 if opts['local']:
251 if opts['local']:
252 repo.opener.append("localsigs", sigmessage)
252 repo.opener.append("localsigs", sigmessage)
253 return
253 return
254
254
255 msigs = match.exact(repo.root, '', ['.hgsigs'])
255 msigs = match.exact(repo.root, '', ['.hgsigs'])
256 s = repo.status(match=msigs, unknown=True, ignored=True)[:6]
256 s = repo.status(match=msigs, unknown=True, ignored=True)[:6]
257 if util.any(s) and not opts["force"]:
257 if util.any(s) and not opts["force"]:
258 raise util.Abort(_("working copy of .hgsigs is changed "
258 raise util.Abort(_("working copy of .hgsigs is changed "
259 "(please commit .hgsigs manually "
259 "(please commit .hgsigs manually "
260 "or use --force)"))
260 "or use --force)"))
261
261
262 sigsfile = repo.wfile(".hgsigs", "ab")
262 sigsfile = repo.wfile(".hgsigs", "ab")
263 sigsfile.write(sigmessage)
263 sigsfile.write(sigmessage)
264 sigsfile.close()
264 sigsfile.close()
265
265
266 if '.hgsigs' not in repo.dirstate:
266 if '.hgsigs' not in repo.dirstate:
267 repo[None].add([".hgsigs"])
267 repo[None].add([".hgsigs"])
268
268
269 if opts["no_commit"]:
269 if opts["no_commit"]:
270 return
270 return
271
271
272 message = opts['message']
272 message = opts['message']
273 if not message:
273 if not message:
274 # we don't translate commit messages
274 # we don't translate commit messages
275 message = "\n".join(["Added signature for changeset %s"
275 message = "\n".join(["Added signature for changeset %s"
276 % hgnode.short(n)
276 % hgnode.short(n)
277 for n in nodes])
277 for n in nodes])
278 try:
278 try:
279 repo.commit(message, opts['user'], opts['date'], match=msigs)
279 repo.commit(message, opts['user'], opts['date'], match=msigs)
280 except ValueError, inst:
280 except ValueError, inst:
281 raise util.Abort(str(inst))
281 raise util.Abort(str(inst))
282
282
283 def node2txt(repo, node, ver):
283 def node2txt(repo, node, ver):
284 """map a manifest into some text"""
284 """map a manifest into some text"""
285 if ver == "0":
285 if ver == "0":
286 return "%s\n" % hgnode.hex(node)
286 return "%s\n" % hgnode.hex(node)
287 else:
287 else:
288 raise util.Abort(_("unknown signature version"))
288 raise util.Abort(_("unknown signature version"))
289
289
@@ -1,279 +1,276 b''
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """hooks for integrating with the CIA.vc notification service
6 """hooks for integrating with the CIA.vc notification service
7
7
8 This is meant to be run as a changegroup or incoming hook. To
8 This is meant to be run as a changegroup or incoming hook. To
9 configure it, set the following options in your hgrc::
9 configure it, set the following options in your hgrc::
10
10
11 [cia]
11 [cia]
12 # your registered CIA user name
12 # your registered CIA user name
13 user = foo
13 user = foo
14 # the name of the project in CIA
14 # the name of the project in CIA
15 project = foo
15 project = foo
16 # the module (subproject) (optional)
16 # the module (subproject) (optional)
17 #module = foo
17 #module = foo
18 # Append a diffstat to the log message (optional)
18 # Append a diffstat to the log message (optional)
19 #diffstat = False
19 #diffstat = False
20 # Template to use for log messages (optional)
20 # Template to use for log messages (optional)
21 #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}
21 #template = {desc}\\n{baseurl}{webroot}/rev/{node}-- {diffstat}
22 # Style to use (optional)
22 # Style to use (optional)
23 #style = foo
23 #style = foo
24 # The URL of the CIA notification service (optional)
24 # The URL of the CIA notification service (optional)
25 # You can use mailto: URLs to send by email, eg
25 # You can use mailto: URLs to send by email, eg
26 # mailto:cia@cia.vc
26 # mailto:cia@cia.vc
27 # Make sure to set email.from if you do this.
27 # Make sure to set email.from if you do this.
28 #url = http://cia.vc/
28 #url = http://cia.vc/
29 # print message instead of sending it (optional)
29 # print message instead of sending it (optional)
30 #test = False
30 #test = False
31 # number of slashes to strip for url paths
31 # number of slashes to strip for url paths
32 #strip = 0
32 #strip = 0
33
33
34 [hooks]
34 [hooks]
35 # one of these:
35 # one of these:
36 changegroup.cia = python:hgcia.hook
36 changegroup.cia = python:hgcia.hook
37 #incoming.cia = python:hgcia.hook
37 #incoming.cia = python:hgcia.hook
38
38
39 [web]
39 [web]
40 # If you want hyperlinks (optional)
40 # If you want hyperlinks (optional)
41 baseurl = http://server/path/to/repo
41 baseurl = http://server/path/to/repo
42 """
42 """
43
43
44 from mercurial.i18n import _
44 from mercurial.i18n import _
45 from mercurial.node import bin, short
45 from mercurial.node import bin, short
46 from mercurial import cmdutil, patch, templater, util, mail
46 from mercurial import cmdutil, patch, templater, util, mail
47 import email.Parser
47 import email.Parser
48
48
49 import xmlrpclib
49 import socket, xmlrpclib
50 from xml.sax import saxutils
50 from xml.sax import saxutils
51
51
# cia.vc can be slow to respond; bound every socket operation so hooks
# don't hang the push/commit that triggered them.
socket_timeout = 30 # seconds
if util.safehasattr(socket, 'setdefaulttimeout'):
    # set a timeout for the socket so you don't have to wait so looooong
    # when cia.vc is having problems. requires python >= 2.3:
    socket.setdefaulttimeout(socket_timeout)

# version string and homepage advertised in the generated CIA XML
HGCIA_VERSION = '0.1'
HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
63
60
64
61
class ciamsg(object):
    """ A CIA message

    Builds the XML notification payload for a single changeset (ctx),
    using configuration carried by the hgcia instance (cia).
    """
    def __init__(self, cia, ctx):
        # cia: hgcia configuration object; ctx: the changectx to report
        self.cia = cia
        self.ctx = ctx
        self.url = self.cia.url
        if self.url:
            # append the web-safe repo root so links point at this repo
            self.url += self.cia.root

    def fileelem(self, path, uri, action):
        """Return one <file> XML element for path with the given action.

        uri may be empty, in which case no uri attribute is emitted.
        """
        if uri:
            uri = ' uri=%s' % saxutils.quoteattr(uri)
        return '<file%s action=%s>%s</file>' % (
            uri, saxutils.quoteattr(action), saxutils.escape(path))

    def fileelems(self):
        """Return the <file> elements for all files touched by ctx.

        Status is computed against the first parent; f[0]/f[1]/f[2] are
        the modified/added/removed lists, each mapped to a CIA action.
        """
        n = self.ctx.node()
        f = self.cia.repo.status(self.ctx.p1().node(), n)
        url = self.url or ''
        if url and url[-1] == '/':
            url = url[:-1]
        elems = []
        for path in f[0]:
            uri = '%s/diff/%s/%s' % (url, short(n), path)
            elems.append(self.fileelem(path, url and uri, 'modify'))
        for path in f[1]:
            # TODO: copy/rename ?
            uri = '%s/file/%s/%s' % (url, short(n), path)
            elems.append(self.fileelem(path, url and uri, 'add'))
        for path in f[2]:
            # removed files get no hyperlink
            elems.append(self.fileelem(path, '', 'remove'))

        return '\n'.join(elems)

    def sourceelem(self, project, module=None, branch=None):
        """Return the <source> element naming project/module/branch."""
        msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
        if module:
            msg.append('<module>%s</module>' % saxutils.escape(module))
        if branch:
            msg.append('<branch>%s</branch>' % saxutils.escape(branch))
        msg.append('</source>')

        return '\n'.join(msg)

    def diffstat(self):
        """Return a diffstat summary for ctx, or '' if it is empty.

        cmdutil.export wants a file-like object, so we capture its
        output in an in-memory buffer and feed it to patch.diffstat.
        """
        class patchbuf(object):
            # minimal file-like sink collecting exported patch lines
            def __init__(self):
                self.lines = []
                # diffstat is stupid
                self.name = 'cia'
            def write(self, data):
                self.lines += data.splitlines(True)
            def close(self):
                pass

        n = self.ctx.node()
        pbuf = patchbuf()
        cmdutil.export(self.cia.repo, [n], fp=pbuf)
        return patch.diffstat(pbuf.lines) or ''

    def logmsg(self):
        """Render the configured log-message template for ctx."""
        # only compute the (expensive) diffstat when configured to
        diffstat = self.cia.diffstat and self.diffstat() or ''
        # capture templater output via the ui buffer stack
        self.cia.ui.pushbuffer()
        self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
                                baseurl=self.cia.ui.config('web', 'baseurl'),
                                url=self.url, diffstat=diffstat,
                                webroot=self.cia.root)
        return self.cia.ui.popbuffer()

    def xml(self):
        """Return the complete CIA <message> XML document as a string."""
        n = short(self.ctx.node())
        src = self.sourceelem(self.cia.project, module=self.cia.module,
                              branch=self.ctx.branch())
        # unix timestamp
        dt = self.ctx.date()
        timestamp = dt[0]

        author = saxutils.escape(self.ctx.user())
        rev = '%d:%s' % (self.ctx.rev(), n)
        log = saxutils.escape(self.logmsg())

        url = self.url
        if url and url[-1] == '/':
            url = url[:-1]
        url = url and '<url>%s/rev/%s</url>' % (saxutils.escape(url), n) or ''

        msg = """
<message>
  <generator>
    <name>Mercurial (hgcia)</name>
    <version>%s</version>
    <url>%s</url>
    <user>%s</user>
  </generator>
  %s
  <body>
    <commit>
      <author>%s</author>
      <version>%s</version>
      <log>%s</log>
      %s
      <files>%s</files>
    </commit>
  </body>
  <timestamp>%d</timestamp>
</message>
""" % \
            (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
            saxutils.escape(self.cia.user), src, author, rev, log, url,
            self.fileelems(), timestamp)

        return msg
177
174
178
175
class hgcia(object):
    """ CIA notification class

    Reads the [cia] configuration section and knows how to deliver a
    rendered message, either over XML-RPC or by email.
    """

    # default log templates; the diffstat variant is used when
    # cia.diffstat is enabled and no explicit template is configured
    deftemplate = '{desc}'
    dstemplate = '{desc}\n-- \n{diffstat}'

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

        self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
        self.user = self.ui.config('cia', 'user')
        self.project = self.ui.config('cia', 'project')
        self.module = self.ui.config('cia', 'module')
        self.diffstat = self.ui.configbool('cia', 'diffstat')
        self.emailfrom = self.ui.config('email', 'from')
        # cia.test: print the message instead of sending it
        self.dryrun = self.ui.configbool('cia', 'test')
        self.url = self.ui.config('web', 'baseurl')
        # Default to -1 for backward compatibility
        self.stripcount = int(self.ui.config('cia', 'strip', -1))
        self.root = self.strip(self.repo.root)

        # build the changeset templater used for log messages
        style = self.ui.config('cia', 'style')
        template = self.ui.config('cia', 'template')
        if not template:
            template = self.diffstat and self.dstemplate or self.deftemplate
        template = templater.parsestring(template, quoted=False)
        t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
                                        style, False)
        t.use_template(template)
        self.templater = t

    def strip(self, path):
        '''strip leading slashes from local path, turn into web-safe path.'''

        path = util.pconvert(path)
        count = self.stripcount
        # negative count means: drop the path entirely
        if count < 0:
            return ''
        # remove the first `count` path components
        while count > 0:
            c = path.find('/')
            if c == -1:
                break
            path = path[c + 1:]
            count -= 1
        return path

    def sendrpc(self, msg):
        """Deliver msg to the CIA hub over XML-RPC.

        Raises util.Abort if the hub reports anything other than
        success ('queued.' or True).
        """
        srv = xmlrpclib.Server(self.ciaurl)
        res = srv.hub.deliver(msg)
        if res is not True and res != 'queued.':
            raise util.Abort(_('%s returned an error: %s') %
                             (self.ciaurl, res))

    def sendemail(self, address, data):
        """Deliver the XML message `data` to `address` by email."""
        # parse the XML back into a message so headers can be attached
        p = email.Parser.Parser()
        msg = p.parsestr(data)
        msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
        msg['To'] = address
        msg['From'] = self.emailfrom
        msg['Subject'] = 'DeliverXML'
        msg['Content-type'] = 'text/xml'
        msgtext = msg.as_string()

        self.ui.status(_('hgcia: sending update to %s\n') % address)
        mail.sendmail(self.ui, util.email(self.emailfrom),
                      [address], msgtext)
246
243
247
244
def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
    """ send CIA notification

    Entry point for the changegroup/incoming hooks. For a changegroup,
    every changeset from `node` to tip is reported; otherwise only the
    single changeset `node` is reported.
    """
    def sendmsg(cia, ctx):
        # render the XML for one changeset and dispatch it according
        # to configuration: dry-run print, email, or XML-RPC
        msg = ciamsg(cia, ctx).xml()
        if cia.dryrun:
            ui.write(msg)
        elif cia.ciaurl.startswith('mailto:'):
            if not cia.emailfrom:
                raise util.Abort(_('email.from must be defined when '
                                   'sending by email'))
            cia.sendemail(cia.ciaurl[7:], msg)
        else:
            cia.sendrpc(msg)

    n = bin(node)
    cia = hgcia(ui, repo)
    # user and project are mandatory; silently skip when unconfigured
    if not cia.user:
        ui.debug('cia: no user specified')
        return
    if not cia.project:
        ui.debug('cia: no project specified')
        return
    if hooktype == 'changegroup':
        # a changegroup covers every rev from the given node to tip
        start = repo.changelog.rev(n)
        end = len(repo.changelog)
        for rev in xrange(start, end):
            n = repo.changelog.node(rev)
            ctx = repo.changectx(n)
            sendmsg(cia, ctx)
    else:
        ctx = repo.changectx(n)
        sendmsg(cia, ctx)
@@ -1,492 +1,492 b''
1 # server.py - common entry point for inotify status server
1 # server.py - common entry point for inotify status server
2 #
2 #
3 # Copyright 2009 Nicolas Dumazet <nicdumz@gmail.com>
3 # Copyright 2009 Nicolas Dumazet <nicdumz@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial import cmdutil, osutil, util
9 from mercurial import cmdutil, osutil, util
10 import common
10 import common
11
11
12 import errno
12 import errno
13 import os
13 import os
14 import socket
14 import socket
15 import stat
15 import stat
16 import struct
16 import struct
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19
19
class AlreadyStartedException(Exception):
    """Signals that an inotify server is already running (see callers)."""
    pass
class TimeoutException(Exception):
    """Signals that a wait on the inotify server timed out (see callers)."""
    pass
24
24
def join(a, b):
    '''Join two path fragments with exactly one slash between them.

    An empty first fragment yields the second fragment unchanged.
    '''
    if not a:
        return b
    if a.endswith('/'):
        return a + b
    return a + '/' + b
31
31
def split(path):
    '''Split path at its last slash into (dirname, basename).

    Paths with no slash yield an empty dirname.
    '''
    idx = path.rfind('/')
    if idx < 0:
        return '', path
    return path[:idx], path[idx + 1:]
37
37
38 walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG)
38 walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG)
39
39
def walk(dirstate, absroot, root):
    '''Like os.walk, but only yields regular files.

    Yields (fullpath, dirs, files) triples, recursing depth-first from
    `root` (repo-relative) under `absroot` (absolute repo root).
    Ignored directories (per dirstate._ignore) are listed in dirs but
    not descended into; nested repositories (a '.hg' subdir) are
    skipped entirely.
    '''

    # This function is critical to performance during startup.

    def walkit(root, reporoot):
        files, dirs = [], []

        try:
            fullpath = join(absroot, root)
            for name, kind in osutil.listdir(fullpath):
                if kind == stat.S_IFDIR:
                    if name == '.hg':
                        # a '.hg' below the top level marks a nested
                        # repository: abandon this whole subtree
                        if not reporoot:
                            return
                    else:
                        dirs.append(name)
                        path = join(root, name)
                        if dirstate._ignore(path):
                            continue
                        for result in walkit(path, False):
                            yield result
                elif kind in (stat.S_IFREG, stat.S_IFLNK):
                    # only regular files and symlinks are tracked
                    files.append(name)
            yield fullpath, dirs, files

        except OSError, err:
            if err.errno == errno.ENOTDIR:
                # fullpath was a directory, but has since been replaced
                # by a file.
                yield fullpath, dirs, files
            elif err.errno not in walk_ignored_errors:
                raise

    return walkit(root, root == '')
75
75
class directory(object):
    """
    Representing a directory

    * path is the relative path from repo root to this directory
    * files is a dict listing the files in this directory
      - keys are file names
      - values are file status
    * dirs is a dict listing the subdirectories
      - key are subdirectories names
      - values are directory objects
    """
    def __init__(self, relpath=''):
        self.path = relpath
        self.files = {}
        self.dirs = {}

    def dir(self, relpath):
        """
        Returns the directory contained at the relative path relpath.
        Creates the intermediate directories if necessary.
        """
        if not relpath:
            return self
        l = relpath.split('/')
        ret = self
        # descend one component at a time, creating missing nodes
        while l:
            next = l.pop(0)
            try:
                ret = ret.dirs[next]
            except KeyError:
                d = directory(join(ret.path, next))
                ret.dirs[next] = d
                ret = d
        return ret

    def walk(self, states, visited=None):
        """
        yield (filename, status) pairs for items in the trees
        that have status in states.
        filenames are relative to the repo root
        """
        for file, st in self.files.iteritems():
            if st in states:
                yield join(self.path, file), st
        for dir in self.dirs.itervalues():
            if visited is not None:
                # record every directory we pass through for the caller
                visited.add(dir.path)
            for e in dir.walk(states):
                yield e

    def lookup(self, states, path, visited):
        """
        yield root-relative filenames that match path, and whose
        status are in states:
        * if path is a file, yield path
        * if path is a directory, yield directory files
        * if path is not tracked, yield nothing
        """
        if path[-1] == '/':
            path = path[:-1]

        paths = path.split('/')

        # we need to check separately for last node
        last = paths.pop()

        tree = self
        try:
            for dir in paths:
                tree = tree.dirs[dir]
        except KeyError:
            # path is not tracked
            visited.add(tree.path)
            return

        try:
            # if path is a directory, walk it
            target = tree.dirs[last]
            visited.add(target.path)
            for file, st in target.walk(states, visited):
                yield file
        except KeyError:
            try:
                if tree.files[last] in states:
                    # path is a file
                    visited.add(tree.path)
                    yield path
            except KeyError:
                # path is not tracked
                pass
167
167
class repowatcher(object):
    """
    Watches inotify events

    Maintains an in-memory mirror of working-directory state: a full
    `tree` of tracked paths plus one `directory` tree per status key,
    so status queries can be answered without rescanning the disk.
    """
    # the dirstate status characters we keep per-status trees for:
    # added, lookup, modified, removed, deleted(!), unknown(?)
    statuskeys = 'almr!?'

    def __init__(self, ui, dirstate, root):
        self.ui = ui
        self.dirstate = dirstate

        # absolute working-dir prefix, always slash-terminated
        self.wprefix = join(root, '')
        self.prefixlen = len(self.wprefix)

        self.tree = directory()
        # cache of (mode, size, mtime) per relative path, see stat()
        self.statcache = {}
        self.statustrees = dict([(s, directory()) for s in self.statuskeys])

        self.ds_info = self.dirstate_info()

        self.last_event = None


    def handle_timeout(self):
        # hook point; no-op here — presumably overridden or extended
        # elsewhere (e.g. update_hgignore calls it before rescanning)
        pass

    def dirstate_info(self):
        """Return (mtime, inode) of .hg/dirstate, or (0, 0) if absent."""
        try:
            st = os.lstat(self.wprefix + '.hg/dirstate')
            return st.st_mtime, st.st_ino
        except OSError, err:
            if err.errno != errno.ENOENT:
                raise
            return 0, 0

    def filestatus(self, fn, st):
        """Compute the status char for fn given its on-disk stat.

        st is a (mode, size, mtime) tuple; fn is looked up in the
        dirstate map to compare recorded vs on-disk metadata.
        """
        try:
            type_, mode, size, time = self.dirstate._map[fn][:4]
        except KeyError:
            # not in the dirstate: unknown
            type_ = '?'
        if type_ == 'n':
            st_mode, st_size, st_mtime = st
            if size == -1:
                return 'l'
            # size or executable-bit change implies modification
            if size and (size != st_size or (mode ^ st_mode) & 0100):
                return 'm'
            if time != int(st_mtime):
                return 'l'
            return 'n'
        if type_ == '?' and self.dirstate._dirignore(fn):
            # we must check not only if the file is ignored, but if any part
            # of its path match an ignore pattern
            return 'i'
        return type_

    def updatefile(self, wfn, osstat):
        '''
        update the file entry of an existing file.

        osstat: (mode, size, time) tuple, as returned by os.lstat(wfn)
        '''

        self._updatestatus(wfn, self.filestatus(wfn, osstat))

    def deletefile(self, wfn, oldstatus):
        '''
        update the entry of a file which has been deleted.

        oldstatus: char in statuskeys, status of the file before deletion
        '''
        if oldstatus == 'r':
            # already scheduled for removal: stays removed
            newstatus = 'r'
        elif oldstatus in 'almn':
            # a tracked file that vanished from disk is missing ('!')
            newstatus = '!'
        else:
            newstatus = None

        self.statcache.pop(wfn, None)
        self._updatestatus(wfn, newstatus)

    def _updatestatus(self, wfn, newstatus):
        '''
        Update the stored status of a file.

        newstatus: - char in (statuskeys + 'ni'), new status to apply.
                   - or None, to stop tracking wfn
        '''
        root, fn = split(wfn)
        d = self.tree.dir(root)

        oldstatus = d.files.get(fn)
        # oldstatus can be either:
        # - None : fn is new
        # - a char in statuskeys: fn is a (tracked) file

        if self.ui.debugflag and oldstatus != newstatus:
            self.ui.note(_('status: %r %s -> %s\n') %
                         (wfn, oldstatus, newstatus))

        # drop the entry from the per-status tree it used to live in
        if oldstatus and oldstatus in self.statuskeys \
            and oldstatus != newstatus:
            del self.statustrees[oldstatus].dir(root).files[fn]

        if newstatus in (None, 'i'):
            # untracked or ignored: forget the file entirely
            d.files.pop(fn, None)
        elif oldstatus != newstatus:
            d.files[fn] = newstatus
            # 'n' (clean) files are only kept in the main tree
            if newstatus != 'n':
                self.statustrees[newstatus].dir(root).files[fn] = newstatus

    def check_deleted(self, key):
        # Files that had been deleted but were present in the dirstate
        # may have vanished from the dirstate; we must clean them up.
        nuke = []
        for wfn, ignore in self.statustrees[key].walk(key):
            if wfn not in self.dirstate:
                nuke.append(wfn)
        for wfn in nuke:
            root, fn = split(wfn)
            del self.statustrees[key].dir(root).files[fn]
            del self.tree.dir(root).files[fn]

    def update_hgignore(self):
        # An update of the ignore file can potentially change the
        # states of all unknown and ignored files.

        # XXX If the user has other ignore files outside the repo, or
        # changes their list of ignore files at run time, we'll
        # potentially never see changes to them. We could get the
        # client to report to us what ignore data they're using.
        # But it's easier to do nothing than to open that can of
        # worms.

        if '_ignore' in self.dirstate.__dict__:
            # invalidate the cached ignore matcher so it is rebuilt
            delattr(self.dirstate, '_ignore')
            self.ui.note(_('rescanning due to .hgignore change\n'))
            self.handle_timeout()
            self.scan()

    def getstat(self, wpath):
        """Return cached (mode, size, mtime) for wpath, stat'ing on miss.

        Returns None (implicitly) if the path no longer exists.
        """
        try:
            return self.statcache[wpath]
        except KeyError:
            try:
                return self.stat(wpath)
            except OSError, err:
                if err.errno != errno.ENOENT:
                    raise

    def stat(self, wpath):
        """lstat wpath, refresh the statcache, and return the result.

        On failure the stale cache entry is dropped and the error
        propagates to the caller.
        """
        try:
            st = os.lstat(join(self.wprefix, wpath))
            ret = st.st_mode, st.st_size, st.st_mtime
            self.statcache[wpath] = ret
            return ret
        except OSError:
            self.statcache.pop(wpath, None)
            raise
325
325
326 class socketlistener(object):
326 class socketlistener(object):
327 """
327 """
328 Listens for client queries on unix socket inotify.sock
328 Listens for client queries on unix socket inotify.sock
329 """
329 """
330 def __init__(self, ui, root, repowatcher, timeout):
330 def __init__(self, ui, root, repowatcher, timeout):
331 self.ui = ui
331 self.ui = ui
332 self.repowatcher = repowatcher
332 self.repowatcher = repowatcher
333 self.sock = socket.socket(socket.AF_UNIX)
333 self.sock = socket.socket(socket.AF_UNIX)
334 self.sockpath = join(root, '.hg/inotify.sock')
334 self.sockpath = join(root, '.hg/inotify.sock')
335
335
336 self.realsockpath = self.sockpath
336 self.realsockpath = self.sockpath
337 if os.path.islink(self.sockpath):
337 if os.path.islink(self.sockpath):
338 if os.path.exists(self.sockpath):
338 if os.path.exists(self.sockpath):
339 self.realsockpath = os.readlink(self.sockpath)
339 self.realsockpath = os.readlink(self.sockpath)
340 else:
340 else:
341 raise util.Abort('inotify-server: cannot start: '
341 raise util.Abort('inotify-server: cannot start: '
342 '.hg/inotify.sock is a broken symlink')
342 '.hg/inotify.sock is a broken symlink')
343 try:
343 try:
344 self.sock.bind(self.realsockpath)
344 self.sock.bind(self.realsockpath)
345 except socket.error, err:
345 except socket.error, err:
346 if err.args[0] == errno.EADDRINUSE:
346 if err.args[0] == errno.EADDRINUSE:
347 raise AlreadyStartedException(_('cannot start: socket is '
347 raise AlreadyStartedException(_('cannot start: socket is '
348 'already bound'))
348 'already bound'))
349 if err.args[0] == "AF_UNIX path too long":
349 if err.args[0] == "AF_UNIX path too long":
350 tempdir = tempfile.mkdtemp(prefix="hg-inotify-")
350 tempdir = tempfile.mkdtemp(prefix="hg-inotify-")
351 self.realsockpath = os.path.join(tempdir, "inotify.sock")
351 self.realsockpath = os.path.join(tempdir, "inotify.sock")
352 try:
352 try:
353 self.sock.bind(self.realsockpath)
353 self.sock.bind(self.realsockpath)
354 os.symlink(self.realsockpath, self.sockpath)
354 os.symlink(self.realsockpath, self.sockpath)
355 except (OSError, socket.error), inst:
355 except (OSError, socket.error), inst:
356 try:
356 try:
357 os.unlink(self.realsockpath)
357 os.unlink(self.realsockpath)
358 except:
358 except OSError:
359 pass
359 pass
360 os.rmdir(tempdir)
360 os.rmdir(tempdir)
361 if inst.errno == errno.EEXIST:
361 if inst.errno == errno.EEXIST:
362 raise AlreadyStartedException(_('cannot start: tried '
362 raise AlreadyStartedException(_('cannot start: tried '
363 'linking .hg/inotify.sock to a temporary socket but'
363 'linking .hg/inotify.sock to a temporary socket but'
364 ' .hg/inotify.sock already exists'))
364 ' .hg/inotify.sock already exists'))
365 raise
365 raise
366 else:
366 else:
367 raise
367 raise
368 self.sock.listen(5)
368 self.sock.listen(5)
369 self.fileno = self.sock.fileno
369 self.fileno = self.sock.fileno
370
370
371 def answer_stat_query(self, cs):
371 def answer_stat_query(self, cs):
372 names = cs.read().split('\0')
372 names = cs.read().split('\0')
373
373
374 states = names.pop()
374 states = names.pop()
375
375
376 self.ui.note(_('answering query for %r\n') % states)
376 self.ui.note(_('answering query for %r\n') % states)
377
377
378 visited = set()
378 visited = set()
379 if not names:
379 if not names:
380 def genresult(states, tree):
380 def genresult(states, tree):
381 for fn, state in tree.walk(states):
381 for fn, state in tree.walk(states):
382 yield fn
382 yield fn
383 else:
383 else:
384 def genresult(states, tree):
384 def genresult(states, tree):
385 for fn in names:
385 for fn in names:
386 for f in tree.lookup(states, fn, visited):
386 for f in tree.lookup(states, fn, visited):
387 yield f
387 yield f
388
388
389 return ['\0'.join(r) for r in [
389 return ['\0'.join(r) for r in [
390 genresult('l', self.repowatcher.statustrees['l']),
390 genresult('l', self.repowatcher.statustrees['l']),
391 genresult('m', self.repowatcher.statustrees['m']),
391 genresult('m', self.repowatcher.statustrees['m']),
392 genresult('a', self.repowatcher.statustrees['a']),
392 genresult('a', self.repowatcher.statustrees['a']),
393 genresult('r', self.repowatcher.statustrees['r']),
393 genresult('r', self.repowatcher.statustrees['r']),
394 genresult('!', self.repowatcher.statustrees['!']),
394 genresult('!', self.repowatcher.statustrees['!']),
395 '?' in states
395 '?' in states
396 and genresult('?', self.repowatcher.statustrees['?'])
396 and genresult('?', self.repowatcher.statustrees['?'])
397 or [],
397 or [],
398 [],
398 [],
399 'c' in states and genresult('n', self.repowatcher.tree) or [],
399 'c' in states and genresult('n', self.repowatcher.tree) or [],
400 visited
400 visited
401 ]]
401 ]]
402
402
403 def answer_dbug_query(self):
403 def answer_dbug_query(self):
404 return ['\0'.join(self.repowatcher.debug())]
404 return ['\0'.join(self.repowatcher.debug())]
405
405
406 def accept_connection(self):
406 def accept_connection(self):
407 sock, addr = self.sock.accept()
407 sock, addr = self.sock.accept()
408
408
409 cs = common.recvcs(sock)
409 cs = common.recvcs(sock)
410 version = ord(cs.read(1))
410 version = ord(cs.read(1))
411
411
412 if version != common.version:
412 if version != common.version:
413 self.ui.warn(_('received query from incompatible client '
413 self.ui.warn(_('received query from incompatible client '
414 'version %d\n') % version)
414 'version %d\n') % version)
415 try:
415 try:
416 # try to send back our version to the client
416 # try to send back our version to the client
417 # this way, the client too is informed of the mismatch
417 # this way, the client too is informed of the mismatch
418 sock.sendall(chr(common.version))
418 sock.sendall(chr(common.version))
419 except:
419 except socket.error:
420 pass
420 pass
421 return
421 return
422
422
423 type = cs.read(4)
423 type = cs.read(4)
424
424
425 if type == 'STAT':
425 if type == 'STAT':
426 results = self.answer_stat_query(cs)
426 results = self.answer_stat_query(cs)
427 elif type == 'DBUG':
427 elif type == 'DBUG':
428 results = self.answer_dbug_query()
428 results = self.answer_dbug_query()
429 else:
429 else:
430 self.ui.warn(_('unrecognized query type: %s\n') % type)
430 self.ui.warn(_('unrecognized query type: %s\n') % type)
431 return
431 return
432
432
433 try:
433 try:
434 try:
434 try:
435 v = chr(common.version)
435 v = chr(common.version)
436
436
437 sock.sendall(v + type + struct.pack(common.resphdrfmts[type],
437 sock.sendall(v + type + struct.pack(common.resphdrfmts[type],
438 *map(len, results)))
438 *map(len, results)))
439 sock.sendall(''.join(results))
439 sock.sendall(''.join(results))
440 finally:
440 finally:
441 sock.shutdown(socket.SHUT_WR)
441 sock.shutdown(socket.SHUT_WR)
442 except socket.error, err:
442 except socket.error, err:
443 if err.args[0] != errno.EPIPE:
443 if err.args[0] != errno.EPIPE:
444 raise
444 raise
445
445
446 if sys.platform.startswith('linux'):
446 if sys.platform.startswith('linux'):
447 import linuxserver as _server
447 import linuxserver as _server
448 else:
448 else:
449 raise ImportError
449 raise ImportError
450
450
451 master = _server.master
451 master = _server.master
452
452
453 def start(ui, dirstate, root, opts):
453 def start(ui, dirstate, root, opts):
454 timeout = opts.get('idle_timeout')
454 timeout = opts.get('idle_timeout')
455 if timeout:
455 if timeout:
456 timeout = float(timeout) * 60000
456 timeout = float(timeout) * 60000
457 else:
457 else:
458 timeout = None
458 timeout = None
459
459
460 class service(object):
460 class service(object):
461 def init(self):
461 def init(self):
462 try:
462 try:
463 self.master = master(ui, dirstate, root, timeout)
463 self.master = master(ui, dirstate, root, timeout)
464 except AlreadyStartedException, inst:
464 except AlreadyStartedException, inst:
465 raise util.Abort("inotify-server: %s" % inst)
465 raise util.Abort("inotify-server: %s" % inst)
466
466
467 def run(self):
467 def run(self):
468 try:
468 try:
469 try:
469 try:
470 self.master.run()
470 self.master.run()
471 except TimeoutException:
471 except TimeoutException:
472 pass
472 pass
473 finally:
473 finally:
474 self.master.shutdown()
474 self.master.shutdown()
475
475
476 if 'inserve' not in sys.argv:
476 if 'inserve' not in sys.argv:
477 runargs = util.hgcmd() + ['inserve', '-R', root]
477 runargs = util.hgcmd() + ['inserve', '-R', root]
478 else:
478 else:
479 runargs = util.hgcmd() + sys.argv[1:]
479 runargs = util.hgcmd() + sys.argv[1:]
480
480
481 pidfile = ui.config('inotify', 'pidfile')
481 pidfile = ui.config('inotify', 'pidfile')
482 if opts['daemon'] and pidfile is not None and 'pid-file' not in runargs:
482 if opts['daemon'] and pidfile is not None and 'pid-file' not in runargs:
483 runargs.append("--pid-file=%s" % pidfile)
483 runargs.append("--pid-file=%s" % pidfile)
484
484
485 service = service()
485 service = service()
486 logfile = ui.config('inotify', 'log')
486 logfile = ui.config('inotify', 'log')
487
487
488 appendpid = ui.configbool('inotify', 'appendpid', False)
488 appendpid = ui.configbool('inotify', 'appendpid', False)
489
489
490 ui.debug('starting inotify server: %s\n' % ' '.join(runargs))
490 ui.debug('starting inotify server: %s\n' % ' '.join(runargs))
491 cmdutil.service(opts, initfn=service.init, runfn=service.run,
491 cmdutil.service(opts, initfn=service.init, runfn=service.run,
492 logfile=logfile, runargs=runargs, appendpid=appendpid)
492 logfile=logfile, runargs=runargs, appendpid=appendpid)
@@ -1,3533 +1,3533 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behaviour can be configured with::
31 files creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting:
52 discarded. Setting:
53
53
54 [mq]
54 [mq]
55 check = True
55 check = True
56
56
57 make them behave as if -c/--check were passed, and non-conflicting
57 make them behave as if -c/--check were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60 '''
60 '''
61
61
62 from mercurial.i18n import _
62 from mercurial.i18n import _
63 from mercurial.node import bin, hex, short, nullid, nullrev
63 from mercurial.node import bin, hex, short, nullid, nullrev
64 from mercurial.lock import release
64 from mercurial.lock import release
65 from mercurial import commands, cmdutil, hg, scmutil, util, revset
65 from mercurial import commands, cmdutil, hg, scmutil, util, revset
66 from mercurial import repair, extensions, url, error, phases
66 from mercurial import repair, extensions, url, error, phases
67 from mercurial import patch as patchmod
67 from mercurial import patch as patchmod
68 import os, re, errno, shutil
68 import os, re, errno, shutil
69
69
70 commands.norepo += " qclone"
70 commands.norepo += " qclone"
71
71
72 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
72 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
73
73
74 cmdtable = {}
74 cmdtable = {}
75 command = cmdutil.command(cmdtable)
75 command = cmdutil.command(cmdtable)
76
76
77 # Patch names looks like unix-file names.
77 # Patch names looks like unix-file names.
78 # They must be joinable with queue directory and result in the patch path.
78 # They must be joinable with queue directory and result in the patch path.
79 normname = util.normpath
79 normname = util.normpath
80
80
81 class statusentry(object):
81 class statusentry(object):
82 def __init__(self, node, name):
82 def __init__(self, node, name):
83 self.node, self.name = node, name
83 self.node, self.name = node, name
84 def __repr__(self):
84 def __repr__(self):
85 return hex(self.node) + ':' + self.name
85 return hex(self.node) + ':' + self.name
86
86
87 class patchheader(object):
87 class patchheader(object):
88 def __init__(self, pf, plainmode=False):
88 def __init__(self, pf, plainmode=False):
89 def eatdiff(lines):
89 def eatdiff(lines):
90 while lines:
90 while lines:
91 l = lines[-1]
91 l = lines[-1]
92 if (l.startswith("diff -") or
92 if (l.startswith("diff -") or
93 l.startswith("Index:") or
93 l.startswith("Index:") or
94 l.startswith("===========")):
94 l.startswith("===========")):
95 del lines[-1]
95 del lines[-1]
96 else:
96 else:
97 break
97 break
98 def eatempty(lines):
98 def eatempty(lines):
99 while lines:
99 while lines:
100 if not lines[-1].strip():
100 if not lines[-1].strip():
101 del lines[-1]
101 del lines[-1]
102 else:
102 else:
103 break
103 break
104
104
105 message = []
105 message = []
106 comments = []
106 comments = []
107 user = None
107 user = None
108 date = None
108 date = None
109 parent = None
109 parent = None
110 format = None
110 format = None
111 subject = None
111 subject = None
112 branch = None
112 branch = None
113 nodeid = None
113 nodeid = None
114 diffstart = 0
114 diffstart = 0
115
115
116 for line in file(pf):
116 for line in file(pf):
117 line = line.rstrip()
117 line = line.rstrip()
118 if (line.startswith('diff --git')
118 if (line.startswith('diff --git')
119 or (diffstart and line.startswith('+++ '))):
119 or (diffstart and line.startswith('+++ '))):
120 diffstart = 2
120 diffstart = 2
121 break
121 break
122 diffstart = 0 # reset
122 diffstart = 0 # reset
123 if line.startswith("--- "):
123 if line.startswith("--- "):
124 diffstart = 1
124 diffstart = 1
125 continue
125 continue
126 elif format == "hgpatch":
126 elif format == "hgpatch":
127 # parse values when importing the result of an hg export
127 # parse values when importing the result of an hg export
128 if line.startswith("# User "):
128 if line.startswith("# User "):
129 user = line[7:]
129 user = line[7:]
130 elif line.startswith("# Date "):
130 elif line.startswith("# Date "):
131 date = line[7:]
131 date = line[7:]
132 elif line.startswith("# Parent "):
132 elif line.startswith("# Parent "):
133 parent = line[9:].lstrip()
133 parent = line[9:].lstrip()
134 elif line.startswith("# Branch "):
134 elif line.startswith("# Branch "):
135 branch = line[9:]
135 branch = line[9:]
136 elif line.startswith("# Node ID "):
136 elif line.startswith("# Node ID "):
137 nodeid = line[10:]
137 nodeid = line[10:]
138 elif not line.startswith("# ") and line:
138 elif not line.startswith("# ") and line:
139 message.append(line)
139 message.append(line)
140 format = None
140 format = None
141 elif line == '# HG changeset patch':
141 elif line == '# HG changeset patch':
142 message = []
142 message = []
143 format = "hgpatch"
143 format = "hgpatch"
144 elif (format != "tagdone" and (line.startswith("Subject: ") or
144 elif (format != "tagdone" and (line.startswith("Subject: ") or
145 line.startswith("subject: "))):
145 line.startswith("subject: "))):
146 subject = line[9:]
146 subject = line[9:]
147 format = "tag"
147 format = "tag"
148 elif (format != "tagdone" and (line.startswith("From: ") or
148 elif (format != "tagdone" and (line.startswith("From: ") or
149 line.startswith("from: "))):
149 line.startswith("from: "))):
150 user = line[6:]
150 user = line[6:]
151 format = "tag"
151 format = "tag"
152 elif (format != "tagdone" and (line.startswith("Date: ") or
152 elif (format != "tagdone" and (line.startswith("Date: ") or
153 line.startswith("date: "))):
153 line.startswith("date: "))):
154 date = line[6:]
154 date = line[6:]
155 format = "tag"
155 format = "tag"
156 elif format == "tag" and line == "":
156 elif format == "tag" and line == "":
157 # when looking for tags (subject: from: etc) they
157 # when looking for tags (subject: from: etc) they
158 # end once you find a blank line in the source
158 # end once you find a blank line in the source
159 format = "tagdone"
159 format = "tagdone"
160 elif message or line:
160 elif message or line:
161 message.append(line)
161 message.append(line)
162 comments.append(line)
162 comments.append(line)
163
163
164 eatdiff(message)
164 eatdiff(message)
165 eatdiff(comments)
165 eatdiff(comments)
166 # Remember the exact starting line of the patch diffs before consuming
166 # Remember the exact starting line of the patch diffs before consuming
167 # empty lines, for external use by TortoiseHg and others
167 # empty lines, for external use by TortoiseHg and others
168 self.diffstartline = len(comments)
168 self.diffstartline = len(comments)
169 eatempty(message)
169 eatempty(message)
170 eatempty(comments)
170 eatempty(comments)
171
171
172 # make sure message isn't empty
172 # make sure message isn't empty
173 if format and format.startswith("tag") and subject:
173 if format and format.startswith("tag") and subject:
174 message.insert(0, "")
174 message.insert(0, "")
175 message.insert(0, subject)
175 message.insert(0, subject)
176
176
177 self.message = message
177 self.message = message
178 self.comments = comments
178 self.comments = comments
179 self.user = user
179 self.user = user
180 self.date = date
180 self.date = date
181 self.parent = parent
181 self.parent = parent
182 # nodeid and branch are for external use by TortoiseHg and others
182 # nodeid and branch are for external use by TortoiseHg and others
183 self.nodeid = nodeid
183 self.nodeid = nodeid
184 self.branch = branch
184 self.branch = branch
185 self.haspatch = diffstart > 1
185 self.haspatch = diffstart > 1
186 self.plainmode = plainmode
186 self.plainmode = plainmode
187
187
188 def setuser(self, user):
188 def setuser(self, user):
189 if not self.updateheader(['From: ', '# User '], user):
189 if not self.updateheader(['From: ', '# User '], user):
190 try:
190 try:
191 patchheaderat = self.comments.index('# HG changeset patch')
191 patchheaderat = self.comments.index('# HG changeset patch')
192 self.comments.insert(patchheaderat + 1, '# User ' + user)
192 self.comments.insert(patchheaderat + 1, '# User ' + user)
193 except ValueError:
193 except ValueError:
194 if self.plainmode or self._hasheader(['Date: ']):
194 if self.plainmode or self._hasheader(['Date: ']):
195 self.comments = ['From: ' + user] + self.comments
195 self.comments = ['From: ' + user] + self.comments
196 else:
196 else:
197 tmp = ['# HG changeset patch', '# User ' + user, '']
197 tmp = ['# HG changeset patch', '# User ' + user, '']
198 self.comments = tmp + self.comments
198 self.comments = tmp + self.comments
199 self.user = user
199 self.user = user
200
200
201 def setdate(self, date):
201 def setdate(self, date):
202 if not self.updateheader(['Date: ', '# Date '], date):
202 if not self.updateheader(['Date: ', '# Date '], date):
203 try:
203 try:
204 patchheaderat = self.comments.index('# HG changeset patch')
204 patchheaderat = self.comments.index('# HG changeset patch')
205 self.comments.insert(patchheaderat + 1, '# Date ' + date)
205 self.comments.insert(patchheaderat + 1, '# Date ' + date)
206 except ValueError:
206 except ValueError:
207 if self.plainmode or self._hasheader(['From: ']):
207 if self.plainmode or self._hasheader(['From: ']):
208 self.comments = ['Date: ' + date] + self.comments
208 self.comments = ['Date: ' + date] + self.comments
209 else:
209 else:
210 tmp = ['# HG changeset patch', '# Date ' + date, '']
210 tmp = ['# HG changeset patch', '# Date ' + date, '']
211 self.comments = tmp + self.comments
211 self.comments = tmp + self.comments
212 self.date = date
212 self.date = date
213
213
214 def setparent(self, parent):
214 def setparent(self, parent):
215 if not self.updateheader(['# Parent '], parent):
215 if not self.updateheader(['# Parent '], parent):
216 try:
216 try:
217 patchheaderat = self.comments.index('# HG changeset patch')
217 patchheaderat = self.comments.index('# HG changeset patch')
218 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
218 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
219 except ValueError:
219 except ValueError:
220 pass
220 pass
221 self.parent = parent
221 self.parent = parent
222
222
223 def setmessage(self, message):
223 def setmessage(self, message):
224 if self.comments:
224 if self.comments:
225 self._delmsg()
225 self._delmsg()
226 self.message = [message]
226 self.message = [message]
227 self.comments += self.message
227 self.comments += self.message
228
228
229 def updateheader(self, prefixes, new):
229 def updateheader(self, prefixes, new):
230 '''Update all references to a field in the patch header.
230 '''Update all references to a field in the patch header.
231 Return whether the field is present.'''
231 Return whether the field is present.'''
232 res = False
232 res = False
233 for prefix in prefixes:
233 for prefix in prefixes:
234 for i in xrange(len(self.comments)):
234 for i in xrange(len(self.comments)):
235 if self.comments[i].startswith(prefix):
235 if self.comments[i].startswith(prefix):
236 self.comments[i] = prefix + new
236 self.comments[i] = prefix + new
237 res = True
237 res = True
238 break
238 break
239 return res
239 return res
240
240
241 def _hasheader(self, prefixes):
241 def _hasheader(self, prefixes):
242 '''Check if a header starts with any of the given prefixes.'''
242 '''Check if a header starts with any of the given prefixes.'''
243 for prefix in prefixes:
243 for prefix in prefixes:
244 for comment in self.comments:
244 for comment in self.comments:
245 if comment.startswith(prefix):
245 if comment.startswith(prefix):
246 return True
246 return True
247 return False
247 return False
248
248
249 def __str__(self):
249 def __str__(self):
250 if not self.comments:
250 if not self.comments:
251 return ''
251 return ''
252 return '\n'.join(self.comments) + '\n\n'
252 return '\n'.join(self.comments) + '\n\n'
253
253
254 def _delmsg(self):
254 def _delmsg(self):
255 '''Remove existing message, keeping the rest of the comments fields.
255 '''Remove existing message, keeping the rest of the comments fields.
256 If comments contains 'subject: ', message will prepend
256 If comments contains 'subject: ', message will prepend
257 the field and a blank line.'''
257 the field and a blank line.'''
258 if self.message:
258 if self.message:
259 subj = 'subject: ' + self.message[0].lower()
259 subj = 'subject: ' + self.message[0].lower()
260 for i in xrange(len(self.comments)):
260 for i in xrange(len(self.comments)):
261 if subj == self.comments[i].lower():
261 if subj == self.comments[i].lower():
262 del self.comments[i]
262 del self.comments[i]
263 self.message = self.message[2:]
263 self.message = self.message[2:]
264 break
264 break
265 ci = 0
265 ci = 0
266 for mi in self.message:
266 for mi in self.message:
267 while mi != self.comments[ci]:
267 while mi != self.comments[ci]:
268 ci += 1
268 ci += 1
269 del self.comments[ci]
269 del self.comments[ci]
270
270
271 def newcommit(repo, phase, *args, **kwargs):
271 def newcommit(repo, phase, *args, **kwargs):
272 """helper dedicated to ensure a commit respect mq.secret setting
272 """helper dedicated to ensure a commit respect mq.secret setting
273
273
274 It should be used instead of repo.commit inside the mq source for operation
274 It should be used instead of repo.commit inside the mq source for operation
275 creating new changeset.
275 creating new changeset.
276 """
276 """
277 if phase is None:
277 if phase is None:
278 if repo.ui.configbool('mq', 'secret', False):
278 if repo.ui.configbool('mq', 'secret', False):
279 phase = phases.secret
279 phase = phases.secret
280 if phase is not None:
280 if phase is not None:
281 backup = repo.ui.backupconfig('phases', 'new-commit')
281 backup = repo.ui.backupconfig('phases', 'new-commit')
282 # Marking the repository as committing an mq patch can be used
282 # Marking the repository as committing an mq patch can be used
283 # to optimize operations like _branchtags().
283 # to optimize operations like _branchtags().
284 repo._committingpatch = True
284 repo._committingpatch = True
285 try:
285 try:
286 if phase is not None:
286 if phase is not None:
287 repo.ui.setconfig('phases', 'new-commit', phase)
287 repo.ui.setconfig('phases', 'new-commit', phase)
288 return repo.commit(*args, **kwargs)
288 return repo.commit(*args, **kwargs)
289 finally:
289 finally:
290 repo._committingpatch = False
290 repo._committingpatch = False
291 if phase is not None:
291 if phase is not None:
292 repo.ui.restoreconfig(backup)
292 repo.ui.restoreconfig(backup)
293
293
294 class AbortNoCleanup(error.Abort):
294 class AbortNoCleanup(error.Abort):
295 pass
295 pass
296
296
297 class queue(object):
297 class queue(object):
298 def __init__(self, ui, path, patchdir=None):
298 def __init__(self, ui, path, patchdir=None):
299 self.basepath = path
299 self.basepath = path
300 try:
300 try:
301 fh = open(os.path.join(path, 'patches.queue'))
301 fh = open(os.path.join(path, 'patches.queue'))
302 cur = fh.read().rstrip()
302 cur = fh.read().rstrip()
303 fh.close()
303 fh.close()
304 if not cur:
304 if not cur:
305 curpath = os.path.join(path, 'patches')
305 curpath = os.path.join(path, 'patches')
306 else:
306 else:
307 curpath = os.path.join(path, 'patches-' + cur)
307 curpath = os.path.join(path, 'patches-' + cur)
308 except IOError:
308 except IOError:
309 curpath = os.path.join(path, 'patches')
309 curpath = os.path.join(path, 'patches')
310 self.path = patchdir or curpath
310 self.path = patchdir or curpath
311 self.opener = scmutil.opener(self.path)
311 self.opener = scmutil.opener(self.path)
312 self.ui = ui
312 self.ui = ui
313 self.applieddirty = False
313 self.applieddirty = False
314 self.seriesdirty = False
314 self.seriesdirty = False
315 self.added = []
315 self.added = []
316 self.seriespath = "series"
316 self.seriespath = "series"
317 self.statuspath = "status"
317 self.statuspath = "status"
318 self.guardspath = "guards"
318 self.guardspath = "guards"
319 self.activeguards = None
319 self.activeguards = None
320 self.guardsdirty = False
320 self.guardsdirty = False
321 # Handle mq.git as a bool with extended values
321 # Handle mq.git as a bool with extended values
322 try:
322 try:
323 gitmode = ui.configbool('mq', 'git', None)
323 gitmode = ui.configbool('mq', 'git', None)
324 if gitmode is None:
324 if gitmode is None:
325 raise error.ConfigError
325 raise error.ConfigError
326 self.gitmode = gitmode and 'yes' or 'no'
326 self.gitmode = gitmode and 'yes' or 'no'
327 except error.ConfigError:
327 except error.ConfigError:
328 self.gitmode = ui.config('mq', 'git', 'auto').lower()
328 self.gitmode = ui.config('mq', 'git', 'auto').lower()
329 self.plainmode = ui.configbool('mq', 'plain', False)
329 self.plainmode = ui.configbool('mq', 'plain', False)
330
330
331 @util.propertycache
331 @util.propertycache
332 def applied(self):
332 def applied(self):
333 def parselines(lines):
333 def parselines(lines):
334 for l in lines:
334 for l in lines:
335 entry = l.split(':', 1)
335 entry = l.split(':', 1)
336 if len(entry) > 1:
336 if len(entry) > 1:
337 n, name = entry
337 n, name = entry
338 yield statusentry(bin(n), name)
338 yield statusentry(bin(n), name)
339 elif l.strip():
339 elif l.strip():
340 self.ui.warn(_('malformated mq status line: %s\n') % entry)
340 self.ui.warn(_('malformated mq status line: %s\n') % entry)
341 # else we ignore empty lines
341 # else we ignore empty lines
342 try:
342 try:
343 lines = self.opener.read(self.statuspath).splitlines()
343 lines = self.opener.read(self.statuspath).splitlines()
344 return list(parselines(lines))
344 return list(parselines(lines))
345 except IOError, e:
345 except IOError, e:
346 if e.errno == errno.ENOENT:
346 if e.errno == errno.ENOENT:
347 return []
347 return []
348 raise
348 raise
349
349
350 @util.propertycache
350 @util.propertycache
351 def fullseries(self):
351 def fullseries(self):
352 try:
352 try:
353 return self.opener.read(self.seriespath).splitlines()
353 return self.opener.read(self.seriespath).splitlines()
354 except IOError, e:
354 except IOError, e:
355 if e.errno == errno.ENOENT:
355 if e.errno == errno.ENOENT:
356 return []
356 return []
357 raise
357 raise
358
358
    @util.propertycache
    def series(self):
        # parseseries() assigns plain instance attributes named 'series'
        # and 'seriesguards', which shadow these propertycache entries;
        # reading self.series afterwards therefore returns the freshly
        # parsed list and caches it for subsequent accesses.
        self.parseseries()
        return self.series
363
363
    @util.propertycache
    def seriesguards(self):
        # see the sibling 'series' property: parseseries() overwrites
        # both cached attributes with freshly parsed lists.
        self.parseseries()
        return self.seriesguards
368
368
369 def invalidate(self):
369 def invalidate(self):
370 for a in 'applied fullseries series seriesguards'.split():
370 for a in 'applied fullseries series seriesguards'.split():
371 if a in self.__dict__:
371 if a in self.__dict__:
372 delattr(self, a)
372 delattr(self, a)
373 self.applieddirty = False
373 self.applieddirty = False
374 self.seriesdirty = False
374 self.seriesdirty = False
375 self.guardsdirty = False
375 self.guardsdirty = False
376 self.activeguards = None
376 self.activeguards = None
377
377
378 def diffopts(self, opts={}, patchfn=None):
378 def diffopts(self, opts={}, patchfn=None):
379 diffopts = patchmod.diffopts(self.ui, opts)
379 diffopts = patchmod.diffopts(self.ui, opts)
380 if self.gitmode == 'auto':
380 if self.gitmode == 'auto':
381 diffopts.upgrade = True
381 diffopts.upgrade = True
382 elif self.gitmode == 'keep':
382 elif self.gitmode == 'keep':
383 pass
383 pass
384 elif self.gitmode in ('yes', 'no'):
384 elif self.gitmode in ('yes', 'no'):
385 diffopts.git = self.gitmode == 'yes'
385 diffopts.git = self.gitmode == 'yes'
386 else:
386 else:
387 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
387 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
388 ' got %s') % self.gitmode)
388 ' got %s') % self.gitmode)
389 if patchfn:
389 if patchfn:
390 diffopts = self.patchopts(diffopts, patchfn)
390 diffopts = self.patchopts(diffopts, patchfn)
391 return diffopts
391 return diffopts
392
392
393 def patchopts(self, diffopts, *patches):
393 def patchopts(self, diffopts, *patches):
394 """Return a copy of input diff options with git set to true if
394 """Return a copy of input diff options with git set to true if
395 referenced patch is a git patch and should be preserved as such.
395 referenced patch is a git patch and should be preserved as such.
396 """
396 """
397 diffopts = diffopts.copy()
397 diffopts = diffopts.copy()
398 if not diffopts.git and self.gitmode == 'keep':
398 if not diffopts.git and self.gitmode == 'keep':
399 for patchfn in patches:
399 for patchfn in patches:
400 patchf = self.opener(patchfn, 'r')
400 patchf = self.opener(patchfn, 'r')
401 # if the patch was a git patch, refresh it as a git patch
401 # if the patch was a git patch, refresh it as a git patch
402 for line in patchf:
402 for line in patchf:
403 if line.startswith('diff --git'):
403 if line.startswith('diff --git'):
404 diffopts.git = True
404 diffopts.git = True
405 break
405 break
406 patchf.close()
406 patchf.close()
407 return diffopts
407 return diffopts
408
408
    def join(self, *p):
        """Join path components *p* onto the patch directory path."""
        return os.path.join(self.path, *p)
411
411
412 def findseries(self, patch):
412 def findseries(self, patch):
413 def matchpatch(l):
413 def matchpatch(l):
414 l = l.split('#', 1)[0]
414 l = l.split('#', 1)[0]
415 return l.strip() == patch
415 return l.strip() == patch
416 for index, l in enumerate(self.fullseries):
416 for index, l in enumerate(self.fullseries):
417 if matchpatch(l):
417 if matchpatch(l):
418 return index
418 return index
419 return None
419 return None
420
420
    # Matches a '#'-introduced guard annotation on a series line, e.g.
    # 'patchname #+foo' or 'patchname #-bar'; group 1 captures the
    # signed guard name ('+foo' / '-bar').
    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
422
422
423 def parseseries(self):
423 def parseseries(self):
424 self.series = []
424 self.series = []
425 self.seriesguards = []
425 self.seriesguards = []
426 for l in self.fullseries:
426 for l in self.fullseries:
427 h = l.find('#')
427 h = l.find('#')
428 if h == -1:
428 if h == -1:
429 patch = l
429 patch = l
430 comment = ''
430 comment = ''
431 elif h == 0:
431 elif h == 0:
432 continue
432 continue
433 else:
433 else:
434 patch = l[:h]
434 patch = l[:h]
435 comment = l[h:]
435 comment = l[h:]
436 patch = patch.strip()
436 patch = patch.strip()
437 if patch:
437 if patch:
438 if patch in self.series:
438 if patch in self.series:
439 raise util.Abort(_('%s appears more than once in %s') %
439 raise util.Abort(_('%s appears more than once in %s') %
440 (patch, self.join(self.seriespath)))
440 (patch, self.join(self.seriespath)))
441 self.series.append(patch)
441 self.series.append(patch)
442 self.seriesguards.append(self.guard_re.findall(comment))
442 self.seriesguards.append(self.guard_re.findall(comment))
443
443
444 def checkguard(self, guard):
444 def checkguard(self, guard):
445 if not guard:
445 if not guard:
446 return _('guard cannot be an empty string')
446 return _('guard cannot be an empty string')
447 bad_chars = '# \t\r\n\f'
447 bad_chars = '# \t\r\n\f'
448 first = guard[0]
448 first = guard[0]
449 if first in '-+':
449 if first in '-+':
450 return (_('guard %r starts with invalid character: %r') %
450 return (_('guard %r starts with invalid character: %r') %
451 (guard, first))
451 (guard, first))
452 for c in bad_chars:
452 for c in bad_chars:
453 if c in guard:
453 if c in guard:
454 return _('invalid character in guard %r: %r') % (guard, c)
454 return _('invalid character in guard %r: %r') % (guard, c)
455
455
456 def setactive(self, guards):
456 def setactive(self, guards):
457 for guard in guards:
457 for guard in guards:
458 bad = self.checkguard(guard)
458 bad = self.checkguard(guard)
459 if bad:
459 if bad:
460 raise util.Abort(bad)
460 raise util.Abort(bad)
461 guards = sorted(set(guards))
461 guards = sorted(set(guards))
462 self.ui.debug('active guards: %s\n' % ' '.join(guards))
462 self.ui.debug('active guards: %s\n' % ' '.join(guards))
463 self.activeguards = guards
463 self.activeguards = guards
464 self.guardsdirty = True
464 self.guardsdirty = True
465
465
466 def active(self):
466 def active(self):
467 if self.activeguards is None:
467 if self.activeguards is None:
468 self.activeguards = []
468 self.activeguards = []
469 try:
469 try:
470 guards = self.opener.read(self.guardspath).split()
470 guards = self.opener.read(self.guardspath).split()
471 except IOError, err:
471 except IOError, err:
472 if err.errno != errno.ENOENT:
472 if err.errno != errno.ENOENT:
473 raise
473 raise
474 guards = []
474 guards = []
475 for i, guard in enumerate(guards):
475 for i, guard in enumerate(guards):
476 bad = self.checkguard(guard)
476 bad = self.checkguard(guard)
477 if bad:
477 if bad:
478 self.ui.warn('%s:%d: %s\n' %
478 self.ui.warn('%s:%d: %s\n' %
479 (self.join(self.guardspath), i + 1, bad))
479 (self.join(self.guardspath), i + 1, bad))
480 else:
480 else:
481 self.activeguards.append(guard)
481 self.activeguards.append(guard)
482 return self.activeguards
482 return self.activeguards
483
483
484 def setguards(self, idx, guards):
484 def setguards(self, idx, guards):
485 for g in guards:
485 for g in guards:
486 if len(g) < 2:
486 if len(g) < 2:
487 raise util.Abort(_('guard %r too short') % g)
487 raise util.Abort(_('guard %r too short') % g)
488 if g[0] not in '-+':
488 if g[0] not in '-+':
489 raise util.Abort(_('guard %r starts with invalid char') % g)
489 raise util.Abort(_('guard %r starts with invalid char') % g)
490 bad = self.checkguard(g[1:])
490 bad = self.checkguard(g[1:])
491 if bad:
491 if bad:
492 raise util.Abort(bad)
492 raise util.Abort(bad)
493 drop = self.guard_re.sub('', self.fullseries[idx])
493 drop = self.guard_re.sub('', self.fullseries[idx])
494 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
494 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
495 self.parseseries()
495 self.parseseries()
496 self.seriesdirty = True
496 self.seriesdirty = True
497
497
498 def pushable(self, idx):
498 def pushable(self, idx):
499 if isinstance(idx, str):
499 if isinstance(idx, str):
500 idx = self.series.index(idx)
500 idx = self.series.index(idx)
501 patchguards = self.seriesguards[idx]
501 patchguards = self.seriesguards[idx]
502 if not patchguards:
502 if not patchguards:
503 return True, None
503 return True, None
504 guards = self.active()
504 guards = self.active()
505 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
505 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
506 if exactneg:
506 if exactneg:
507 return False, repr(exactneg[0])
507 return False, repr(exactneg[0])
508 pos = [g for g in patchguards if g[0] == '+']
508 pos = [g for g in patchguards if g[0] == '+']
509 exactpos = [g for g in pos if g[1:] in guards]
509 exactpos = [g for g in pos if g[1:] in guards]
510 if pos:
510 if pos:
511 if exactpos:
511 if exactpos:
512 return True, repr(exactpos[0])
512 return True, repr(exactpos[0])
513 return False, ' '.join(map(repr, pos))
513 return False, ' '.join(map(repr, pos))
514 return True, ''
514 return True, ''
515
515
516 def explainpushable(self, idx, all_patches=False):
516 def explainpushable(self, idx, all_patches=False):
517 write = all_patches and self.ui.write or self.ui.warn
517 write = all_patches and self.ui.write or self.ui.warn
518 if all_patches or self.ui.verbose:
518 if all_patches or self.ui.verbose:
519 if isinstance(idx, str):
519 if isinstance(idx, str):
520 idx = self.series.index(idx)
520 idx = self.series.index(idx)
521 pushable, why = self.pushable(idx)
521 pushable, why = self.pushable(idx)
522 if all_patches and pushable:
522 if all_patches and pushable:
523 if why is None:
523 if why is None:
524 write(_('allowing %s - no guards in effect\n') %
524 write(_('allowing %s - no guards in effect\n') %
525 self.series[idx])
525 self.series[idx])
526 else:
526 else:
527 if not why:
527 if not why:
528 write(_('allowing %s - no matching negative guards\n') %
528 write(_('allowing %s - no matching negative guards\n') %
529 self.series[idx])
529 self.series[idx])
530 else:
530 else:
531 write(_('allowing %s - guarded by %s\n') %
531 write(_('allowing %s - guarded by %s\n') %
532 (self.series[idx], why))
532 (self.series[idx], why))
533 if not pushable:
533 if not pushable:
534 if why:
534 if why:
535 write(_('skipping %s - guarded by %s\n') %
535 write(_('skipping %s - guarded by %s\n') %
536 (self.series[idx], why))
536 (self.series[idx], why))
537 else:
537 else:
538 write(_('skipping %s - no matching guards\n') %
538 write(_('skipping %s - no matching guards\n') %
539 self.series[idx])
539 self.series[idx])
540
540
541 def savedirty(self):
541 def savedirty(self):
542 def writelist(items, path):
542 def writelist(items, path):
543 fp = self.opener(path, 'w')
543 fp = self.opener(path, 'w')
544 for i in items:
544 for i in items:
545 fp.write("%s\n" % i)
545 fp.write("%s\n" % i)
546 fp.close()
546 fp.close()
547 if self.applieddirty:
547 if self.applieddirty:
548 writelist(map(str, self.applied), self.statuspath)
548 writelist(map(str, self.applied), self.statuspath)
549 self.applieddirty = False
549 self.applieddirty = False
550 if self.seriesdirty:
550 if self.seriesdirty:
551 writelist(self.fullseries, self.seriespath)
551 writelist(self.fullseries, self.seriespath)
552 self.seriesdirty = False
552 self.seriesdirty = False
553 if self.guardsdirty:
553 if self.guardsdirty:
554 writelist(self.activeguards, self.guardspath)
554 writelist(self.activeguards, self.guardspath)
555 self.guardsdirty = False
555 self.guardsdirty = False
556 if self.added:
556 if self.added:
557 qrepo = self.qrepo()
557 qrepo = self.qrepo()
558 if qrepo:
558 if qrepo:
559 qrepo[None].add(f for f in self.added if f not in qrepo[None])
559 qrepo[None].add(f for f in self.added if f not in qrepo[None])
560 self.added = []
560 self.added = []
561
561
    def removeundo(self, repo):
        """Delete the repository's undo file, if present.

        Keeps finished/stripped mq state from being resurrected with
        rollback; a failed removal only warns, it does not abort.
        """
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError, inst:
            self.ui.warn(_('error removing undo: %s\n') % str(inst))
570
570
571 def backup(self, repo, files, copy=False):
571 def backup(self, repo, files, copy=False):
572 # backup local changes in --force case
572 # backup local changes in --force case
573 for f in sorted(files):
573 for f in sorted(files):
574 absf = repo.wjoin(f)
574 absf = repo.wjoin(f)
575 if os.path.lexists(absf):
575 if os.path.lexists(absf):
576 self.ui.note(_('saving current version of %s as %s\n') %
576 self.ui.note(_('saving current version of %s as %s\n') %
577 (f, f + '.orig'))
577 (f, f + '.orig'))
578 if copy:
578 if copy:
579 util.copyfile(absf, absf + '.orig')
579 util.copyfile(absf, absf + '.orig')
580 else:
580 else:
581 util.rename(absf, absf + '.orig')
581 util.rename(absf, absf + '.orig')
582
582
583 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
583 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
584 fp=None, changes=None, opts={}):
584 fp=None, changes=None, opts={}):
585 stat = opts.get('stat')
585 stat = opts.get('stat')
586 m = scmutil.match(repo[node1], files, opts)
586 m = scmutil.match(repo[node1], files, opts)
587 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
587 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
588 changes, stat, fp)
588 changes, stat, fp)
589
589
590 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
590 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
591 # first try just applying the patch
591 # first try just applying the patch
592 (err, n) = self.apply(repo, [patch], update_status=False,
592 (err, n) = self.apply(repo, [patch], update_status=False,
593 strict=True, merge=rev)
593 strict=True, merge=rev)
594
594
595 if err == 0:
595 if err == 0:
596 return (err, n)
596 return (err, n)
597
597
598 if n is None:
598 if n is None:
599 raise util.Abort(_("apply failed for patch %s") % patch)
599 raise util.Abort(_("apply failed for patch %s") % patch)
600
600
601 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
601 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
602
602
603 # apply failed, strip away that rev and merge.
603 # apply failed, strip away that rev and merge.
604 hg.clean(repo, head)
604 hg.clean(repo, head)
605 self.strip(repo, [n], update=False, backup='strip')
605 self.strip(repo, [n], update=False, backup='strip')
606
606
607 ctx = repo[rev]
607 ctx = repo[rev]
608 ret = hg.merge(repo, rev)
608 ret = hg.merge(repo, rev)
609 if ret:
609 if ret:
610 raise util.Abort(_("update returned %d") % ret)
610 raise util.Abort(_("update returned %d") % ret)
611 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
611 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
612 if n is None:
612 if n is None:
613 raise util.Abort(_("repo commit failed"))
613 raise util.Abort(_("repo commit failed"))
614 try:
614 try:
615 ph = patchheader(mergeq.join(patch), self.plainmode)
615 ph = patchheader(mergeq.join(patch), self.plainmode)
616 except:
616 except:
617 raise util.Abort(_("unable to read %s") % patch)
617 raise util.Abort(_("unable to read %s") % patch)
618
618
619 diffopts = self.patchopts(diffopts, patch)
619 diffopts = self.patchopts(diffopts, patch)
620 patchf = self.opener(patch, "w")
620 patchf = self.opener(patch, "w")
621 comments = str(ph)
621 comments = str(ph)
622 if comments:
622 if comments:
623 patchf.write(comments)
623 patchf.write(comments)
624 self.printdiff(repo, diffopts, head, n, fp=patchf)
624 self.printdiff(repo, diffopts, head, n, fp=patchf)
625 patchf.close()
625 patchf.close()
626 self.removeundo(repo)
626 self.removeundo(repo)
627 return (0, n)
627 return (0, n)
628
628
    def qparents(self, repo, rev=None):
        """Return the revision the patch queue hangs off of.

        Without *rev*: for a non-merge working directory return its
        first parent; otherwise return the topmost applied patch node
        (or None when nothing is applied).  With *rev*: return its
        second parent when that parent is an applied patch, else the
        first parent.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                # not a merge working directory
                return p1
            if not self.applied:
                return None
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            return p2
        return p1
641
641
642 def mergepatch(self, repo, mergeq, series, diffopts):
642 def mergepatch(self, repo, mergeq, series, diffopts):
643 if not self.applied:
643 if not self.applied:
644 # each of the patches merged in will have two parents. This
644 # each of the patches merged in will have two parents. This
645 # can confuse the qrefresh, qdiff, and strip code because it
645 # can confuse the qrefresh, qdiff, and strip code because it
646 # needs to know which parent is actually in the patch queue.
646 # needs to know which parent is actually in the patch queue.
647 # so, we insert a merge marker with only one parent. This way
647 # so, we insert a merge marker with only one parent. This way
648 # the first patch in the queue is never a merge patch
648 # the first patch in the queue is never a merge patch
649 #
649 #
650 pname = ".hg.patches.merge.marker"
650 pname = ".hg.patches.merge.marker"
651 n = newcommit(repo, None, '[mq]: merge marker', force=True)
651 n = newcommit(repo, None, '[mq]: merge marker', force=True)
652 self.removeundo(repo)
652 self.removeundo(repo)
653 self.applied.append(statusentry(n, pname))
653 self.applied.append(statusentry(n, pname))
654 self.applieddirty = True
654 self.applieddirty = True
655
655
656 head = self.qparents(repo)
656 head = self.qparents(repo)
657
657
658 for patch in series:
658 for patch in series:
659 patch = mergeq.lookup(patch, strict=True)
659 patch = mergeq.lookup(patch, strict=True)
660 if not patch:
660 if not patch:
661 self.ui.warn(_("patch %s does not exist\n") % patch)
661 self.ui.warn(_("patch %s does not exist\n") % patch)
662 return (1, None)
662 return (1, None)
663 pushable, reason = self.pushable(patch)
663 pushable, reason = self.pushable(patch)
664 if not pushable:
664 if not pushable:
665 self.explainpushable(patch, all_patches=True)
665 self.explainpushable(patch, all_patches=True)
666 continue
666 continue
667 info = mergeq.isapplied(patch)
667 info = mergeq.isapplied(patch)
668 if not info:
668 if not info:
669 self.ui.warn(_("patch %s is not applied\n") % patch)
669 self.ui.warn(_("patch %s is not applied\n") % patch)
670 return (1, None)
670 return (1, None)
671 rev = info[1]
671 rev = info[1]
672 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
672 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
673 if head:
673 if head:
674 self.applied.append(statusentry(head, patch))
674 self.applied.append(statusentry(head, patch))
675 self.applieddirty = True
675 self.applieddirty = True
676 if err:
676 if err:
677 return (err, head)
677 return (err, head)
678 self.savedirty()
678 self.savedirty()
679 return (0, head)
679 return (0, head)
680
680
681 def patch(self, repo, patchfile):
681 def patch(self, repo, patchfile):
682 '''Apply patchfile to the working directory.
682 '''Apply patchfile to the working directory.
683 patchfile: name of patch file'''
683 patchfile: name of patch file'''
684 files = set()
684 files = set()
685 try:
685 try:
686 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
686 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
687 files=files, eolmode=None)
687 files=files, eolmode=None)
688 return (True, list(files), fuzz)
688 return (True, list(files), fuzz)
689 except Exception, inst:
689 except Exception, inst:
690 self.ui.note(str(inst) + '\n')
690 self.ui.note(str(inst) + '\n')
691 if not self.ui.verbose:
691 if not self.ui.verbose:
692 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
692 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
693 self.ui.traceback()
693 self.ui.traceback()
694 return (False, list(files), False)
694 return (False, list(files), False)
695
695
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, check=False):
        """Apply *series* under wlock/lock inside a 'qpush' transaction.

        Thin locking/transaction wrapper around _apply(); rolls the
        transaction back and invalidates cached state on any unexpected
        error before re-raising it.
        """
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, check=check)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # deliberate partial stop: commit what was applied so far
                tr.close()
                self.savedirty()
                return 2, repo.dirstate.p1()
            except:
                # naked except is intentional: any failure (including
                # KeyboardInterrupt) must abort the transaction and
                # invalidate cached state before re-raising
                try:
                    tr.abort()
                finally:
                    repo.invalidate()
                    repo.dirstate.invalidate()
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
726
726
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, check=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # back up (or, with check, refuse to touch) local
                    # changes to any file this patch modifies
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and check:
                        raise AbortNoCleanup(
                            _("local changes found, refresh first"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                # self.patch() returns True on success; invert into an
                # error flag
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise util.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise util.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                # rejects were written; stop pushing further patches
                self.ui.warn(_("patch failed, rejects left in working dir\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
821
821
    def _cleanup(self, patches, numrevs, keep=False):
        """Remove *patches* from the series (and, unless keep, from
        disk and the patch repository) after *numrevs* of them have
        been finished.  Returns the nodes of the finished entries.
        """
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                os.unlink(self.join(p))

        qfinished = []
        if numrevs:
            # the first numrevs applied entries become regular changesets
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        # delete from the series file, highest index first, so earlier
        # deletions do not shift the indexes of later ones
        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                # finishing revisions: warn but carry on
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise util.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
858
858
    def _revpatches(self, repo, revs):
        """Map ascending revisions *revs* onto the corresponding
        applied patch names.

        Each revision must be managed by mq and must match the applied
        stack from the bottom up.  Returns the list of patch names.
        """
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                # revision predates the patch queue entirely
                raise util.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                # revs must form a contiguous prefix of the applied stack
                msg = _('cannot delete revision %d above applied patches')
                raise util.Abort(msg % rev)

            patch = self.applied[i].name
            for fmt in ('[mq]: %s', 'imported patch %s'):
                if ctx.description() == fmt % patch:
                    # the changeset still carries a placeholder message
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches
882
882
    def finish(self, repo, revs):
        """Turn the applied patches corresponding to *revs* into
        regular changesets.

        With mq.secret set, finished changesets that were above the
        configured new-commit phase are moved down to it when their
        parent's phase allows.
        """
        # Manually trigger phase computation to ensure phasedefaults is
        # executed before we remove the patches.
        repo._phasecache
        patches = self._revpatches(repo, sorted(revs))
        qfinished = self._cleanup(patches, len(patches))
        if qfinished and repo.ui.configbool('mq', 'secret', False):
            # only use this logic when the secret option is added
            oldqbase = repo[qfinished[0]]
            tphase = repo.ui.config('phases', 'new-commit', phases.draft)
            if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
                phases.advanceboundary(repo, tphase, qfinished)
895
895
    def delete(self, repo, patches, opts):
        """Remove patches, given by name and/or --rev, from the queue (qdelete).

        Named patches must be unapplied and present in the series file;
        --rev arguments are translated to patch names via _revpatches.
        """
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        # resolve names strictly and reject applied/unknown patches,
        # de-duplicating along the way
        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        # translate --rev arguments into patch names as well
        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = scmutil.revrange(repo, opts.get('rev'))
            # normalize descending ranges to ascending order
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))
924
924
925 def checktoppatch(self, repo):
925 def checktoppatch(self, repo):
926 if self.applied:
926 if self.applied:
927 top = self.applied[-1].node
927 top = self.applied[-1].node
928 patch = self.applied[-1].name
928 patch = self.applied[-1].name
929 pp = repo.dirstate.parents()
929 pp = repo.dirstate.parents()
930 if top not in pp:
930 if top not in pp:
931 raise util.Abort(_("working directory revision is not qtip"))
931 raise util.Abort(_("working directory revision is not qtip"))
932 return top, patch
932 return top, patch
933 return None, None
933 return None, None
934
934
935 def checksubstate(self, repo):
935 def checksubstate(self, repo):
936 '''return list of subrepos at a different revision than substate.
936 '''return list of subrepos at a different revision than substate.
937 Abort if any subrepos have uncommitted changes.'''
937 Abort if any subrepos have uncommitted changes.'''
938 inclsubs = []
938 inclsubs = []
939 wctx = repo[None]
939 wctx = repo[None]
940 for s in wctx.substate:
940 for s in wctx.substate:
941 if wctx.sub(s).dirty(True):
941 if wctx.sub(s).dirty(True):
942 raise util.Abort(
942 raise util.Abort(
943 _("uncommitted changes in subrepository %s") % s)
943 _("uncommitted changes in subrepository %s") % s)
944 elif wctx.sub(s).dirty():
944 elif wctx.sub(s).dirty():
945 inclsubs.append(s)
945 inclsubs.append(s)
946 return inclsubs
946 return inclsubs
947
947
948 def localchangesfound(self, refresh=True):
948 def localchangesfound(self, refresh=True):
949 if refresh:
949 if refresh:
950 raise util.Abort(_("local changes found, refresh first"))
950 raise util.Abort(_("local changes found, refresh first"))
951 else:
951 else:
952 raise util.Abort(_("local changes found"))
952 raise util.Abort(_("local changes found"))
953
953
954 def checklocalchanges(self, repo, force=False, refresh=True):
954 def checklocalchanges(self, repo, force=False, refresh=True):
955 m, a, r, d = repo.status()[:4]
955 m, a, r, d = repo.status()[:4]
956 if (m or a or r or d) and not force:
956 if (m or a or r or d) and not force:
957 self.localchangesfound(refresh)
957 self.localchangesfound(refresh)
958 return m, a, r, d
958 return m, a, r, d
959
959
960 _reserved = ('series', 'status', 'guards', '.', '..')
960 _reserved = ('series', 'status', 'guards', '.', '..')
961 def checkreservedname(self, name):
961 def checkreservedname(self, name):
962 if name in self._reserved:
962 if name in self._reserved:
963 raise util.Abort(_('"%s" cannot be used as the name of a patch')
963 raise util.Abort(_('"%s" cannot be used as the name of a patch')
964 % name)
964 % name)
965 for prefix in ('.hg', '.mq'):
965 for prefix in ('.hg', '.mq'):
966 if name.startswith(prefix):
966 if name.startswith(prefix):
967 raise util.Abort(_('patch name cannot begin with "%s"')
967 raise util.Abort(_('patch name cannot begin with "%s"')
968 % prefix)
968 % prefix)
969 for c in ('#', ':'):
969 for c in ('#', ':'):
970 if c in name:
970 if c in name:
971 raise util.Abort(_('"%s" cannot be used in the name of a patch')
971 raise util.Abort(_('"%s" cannot be used in the name of a patch')
972 % c)
972 % c)
973
973
974 def checkpatchname(self, name, force=False):
974 def checkpatchname(self, name, force=False):
975 self.checkreservedname(name)
975 self.checkreservedname(name)
976 if not force and os.path.exists(self.join(name)):
976 if not force and os.path.exists(self.join(name)):
977 if os.path.isdir(self.join(name)):
977 if os.path.isdir(self.join(name)):
978 raise util.Abort(_('"%s" already exists as a directory')
978 raise util.Abort(_('"%s" already exists as a directory')
979 % name)
979 % name)
980 else:
980 else:
981 raise util.Abort(_('patch "%s" already exists') % name)
981 raise util.Abort(_('patch "%s" already exists') % name)
982
982
983 def checkforcecheck(self, check, force):
983 def checkforcecheck(self, check, force):
984 if force and check:
984 if force and check:
985 raise util.Abort(_('cannot use both --force and --check'))
985 raise util.Abort(_('cannot use both --force and --check'))
986
986
    def new(self, repo, patchfn, *pats, **opts):
        """Create a new patch (qnew) from the current working directory state.

        options:
        msg: a string or a no-argument function returning a string
        user, date: author information for the new changeset
        git: use git-style diffs
        checkname: validate patchfn as a patch name (default True)
        include/exclude/pats: limit the files included in the patch
        """
        msg = opts.get('msg')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = self.checksubstate(repo)
        if inclsubs:
            inclsubs.append('.hgsubstate')
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            if inclsubs:
                pats = list(pats or []) + inclsubs
            match = scmutil.match(repo[None], pats, opts)
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise util.Abort('%s: %s' % (f, msg))
            match.bad = badfn
            changes = repo.status(match=match)
            m, a, r, d = changes[:4]
        else:
            changes = self.checklocalchanges(repo, force=True)
            m, a, r, d = changes
        match = scmutil.matchfiles(repo, m + a + r + inclsubs)
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot manage merge changesets'))
        commitfiles = m + a + r
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        wlock = repo.wlock()
        try:
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError, e:
                raise util.Abort(_('cannot write patch "%s": %s')
                                 % (patchfn, e.strerror))
            try:
                # write the patch header: plain mail-style headers in
                # plainmode, '# HG changeset patch' style otherwise
                if self.plainmode:
                    if user:
                        p.write("From: " + user + "\n")
                        if not date:
                            p.write("\n")
                    if date:
                        p.write("Date: %d %d\n\n" % date)
                else:
                    p.write("# HG changeset patch\n")
                    p.write("# Parent "
                            + hex(repo[None].p1().node()) + "\n")
                    if user:
                        p.write("# User " + user + "\n")
                    if date:
                        p.write("# Date %s %s\n\n" % date)
                if util.safehasattr(msg, '__call__'):
                    msg = msg()
                commitmsg = msg and msg or ("[mq]: %s" % patchfn)
                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True)
                if n is None:
                    raise util.Abort(_("repo commit failed"))
                try:
                    # record the new patch in the series and applied state,
                    # then write the patch body (message + diff) to disk
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    if msg:
                        msg = msg + "\n\n"
                        p.write(msg)
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        if inclsubs:
                            # classify .hgsubstate by its dirstate status so
                            # the diff records it as added/removed/modified
                            if substatestate in 'a?':
                                changes[1].append('.hgsubstate')
                            elif substatestate in 'r':
                                changes[2].append('.hgsubstate')
                            else: # modified
                                changes[0].append('.hgsubstate')
                        chunks = patchmod.diff(repo, node1=parent, node2=n,
                                               changes=changes, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except: # re-raises after undoing the commit
                    repo.rollback()
                    raise
            except Exception:
                # remove the half-written patch file before re-raising
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except OSError:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
            self.removeundo(repo)
        finally:
            release(wlock)
1093
1093
    def strip(self, repo, revs, update=True, backup="all", force=None):
        """Strip revs from the repository via repair.strip.

        With update, first check for local changes and move the working
        directory to the qparent of the first stripped revision.
        """
        wlock = lock = None
        try:
            # take the working-dir lock before the store lock
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.checklocalchanges(repo, force=force, refresh=False)
                urev = self.qparents(repo, revs[0])
                hg.clean(repo, urev)
                repo.dirstate.write()

            repair.strip(self.ui, repo, revs, backup)
        finally:
            release(lock, wlock)
1109
1109
1110 def isapplied(self, patch):
1110 def isapplied(self, patch):
1111 """returns (index, rev, patch)"""
1111 """returns (index, rev, patch)"""
1112 for i, a in enumerate(self.applied):
1112 for i, a in enumerate(self.applied):
1113 if a.name == patch:
1113 if a.name == patch:
1114 return (i, a.node, a.name)
1114 return (i, a.node, a.name)
1115 return None
1115 return None
1116
1116
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number (as string) to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied identifier to a series entry.

        Tries exact name, then (when the identifier is not a patch file
        on disk) an integer index into the series; unless strict, also
        unique-substring matching, the qtip/qbase aliases, and
        'name-N'/'name+N' series offsets.  Aborts when nothing matches.
        """
        def partialname(s):
            # exact match, then unique substring, then qtip/qbase aliases
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.seriesend(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch in self.series:
            return patch

        if not os.path.isfile(self.join(patch)):
            # a bare number selects a series entry by (possibly negative)
            # index
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

        if not strict:
            res = partialname(patch)
            if res:
                return res
            # 'name-N': N entries before 'name' in the series
            minus = patch.rfind('-')
            if minus >= 0:
                res = partialname(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # 'name+N': N entries after 'name' in the series
            plus = patch.rfind('+')
            if plus >= 0:
                res = partialname(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
1183
1183
    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
             all=False, move=False, exact=False, nobackup=False, check=False):
        """qpush: apply patches from the series onto the repository.

        patch: push up to (and including) this patch; all: push every
        remaining patch; move: reorder the series so patch is pushed
        next; exact: update to the parent recorded in the patch first.
        Returns the first element of apply()/mergepatch()'s result, or
        0/1 for the early no-op and error cases handled here.
        """
        self.checkforcecheck(check, force)
        diffopts = self.diffopts()
        wlock = repo.wlock()
        try:
            heads = []
            for b, ls in repo.branchmap().iteritems():
                heads += ls
            if not heads:
                heads = [nullid]
            if repo.dirstate.p1() not in heads and not exact:
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                patch = self.lookup(patch)
                info = self.isapplied(patch)
                if info and info[0] >= len(self.applied) - 1:
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return 0

                pushable, reason = self.pushable(patch)
                if pushable:
                    if self.series.index(patch) < self.seriesend():
                        raise util.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                else:
                    if reason:
                        reason = _('guarded by %s') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.seriesend()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force and not check:
                self.checklocalchanges(repo, refresh=self.applied)

            if exact:
                if check:
                    raise util.Abort(
                        _("cannot use --exact and --check together"))
                if move:
                    raise util.Abort(_('cannot use --exact and --move '
                                       'together'))
                if self.applied:
                    raise util.Abort(_('cannot push --exact with applied '
                                       'patches'))
                root = self.series[start]
                target = patchheader(self.join(root), self.plainmode).parent
                if not target:
                    raise util.Abort(
                        _("%s does not have a parent recorded") % root)
                if not repo[target] == repo['.']:
                    hg.update(repo, target)

            if move:
                # move the requested patch to the front of the unapplied
                # part of the full series before pushing
                if not patch:
                    raise util.Abort(_("please specify the patch to move"))
                for fullstart, rpn in enumerate(self.fullseries):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                        break
                for i, rpn in enumerate(self.fullseries[fullstart:]):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == patch:
                        break
                index = fullstart + i
                assert index < len(self.fullseries)
                fullpatch = self.fullseries[index]
                del self.fullseries[index]
                self.fullseries.insert(fullstart, fullpatch)
                self.parseseries()
                self.seriesdirty = True

            self.applieddirty = True
            if start > 0:
                self.checktoppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            # with --force or --check, back up files the push may clobber
            tobackup = set()
            if (not nobackup and force) or check:
                m, a, r, d = self.checklocalchanges(repo, force=True)
                if check:
                    tobackup.update(m + a + r + d)
                else:
                    tobackup.update(m + a)

            s = self.series[start:end]
            all_files = set()
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s, diffopts)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files,
                                     tobackup=tobackup, check=check)
            except: # re-raises after restoring the working directory
                self.ui.warn(_('cleaning up working directory...'))
                node = repo.dirstate.p1()
                hg.revert(repo, node, None)
                # only remove unknown files that we know we touched or
                # created while patching
                for f in all_files:
                    if f not in repo.dirstate:
                        try:
                            util.unlinkpath(repo.wjoin(f))
                        except OSError, inst:
                            if inst.errno != errno.ENOENT:
                                raise
                self.ui.warn(_('done\n'))
                raise

            if not self.applied:
                return ret[0]
            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                msg = _("errors during apply, please fix and refresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]

        finally:
            wlock.release()
1334
1334
1335 def pop(self, repo, patch=None, force=False, update=True, all=False,
1335 def pop(self, repo, patch=None, force=False, update=True, all=False,
1336 nobackup=False, check=False):
1336 nobackup=False, check=False):
1337 self.checkforcecheck(check, force)
1337 self.checkforcecheck(check, force)
1338 wlock = repo.wlock()
1338 wlock = repo.wlock()
1339 try:
1339 try:
1340 if patch:
1340 if patch:
1341 # index, rev, patch
1341 # index, rev, patch
1342 info = self.isapplied(patch)
1342 info = self.isapplied(patch)
1343 if not info:
1343 if not info:
1344 patch = self.lookup(patch)
1344 patch = self.lookup(patch)
1345 info = self.isapplied(patch)
1345 info = self.isapplied(patch)
1346 if not info:
1346 if not info:
1347 raise util.Abort(_("patch %s is not applied") % patch)
1347 raise util.Abort(_("patch %s is not applied") % patch)
1348
1348
1349 if not self.applied:
1349 if not self.applied:
1350 # Allow qpop -a to work repeatedly,
1350 # Allow qpop -a to work repeatedly,
1351 # but not qpop without an argument
1351 # but not qpop without an argument
1352 self.ui.warn(_("no patches applied\n"))
1352 self.ui.warn(_("no patches applied\n"))
1353 return not all
1353 return not all
1354
1354
1355 if all:
1355 if all:
1356 start = 0
1356 start = 0
1357 elif patch:
1357 elif patch:
1358 start = info[0] + 1
1358 start = info[0] + 1
1359 else:
1359 else:
1360 start = len(self.applied) - 1
1360 start = len(self.applied) - 1
1361
1361
1362 if start >= len(self.applied):
1362 if start >= len(self.applied):
1363 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1363 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1364 return
1364 return
1365
1365
1366 if not update:
1366 if not update:
1367 parents = repo.dirstate.parents()
1367 parents = repo.dirstate.parents()
1368 rr = [x.node for x in self.applied]
1368 rr = [x.node for x in self.applied]
1369 for p in parents:
1369 for p in parents:
1370 if p in rr:
1370 if p in rr:
1371 self.ui.warn(_("qpop: forcing dirstate update\n"))
1371 self.ui.warn(_("qpop: forcing dirstate update\n"))
1372 update = True
1372 update = True
1373 else:
1373 else:
1374 parents = [p.node() for p in repo[None].parents()]
1374 parents = [p.node() for p in repo[None].parents()]
1375 needupdate = False
1375 needupdate = False
1376 for entry in self.applied[start:]:
1376 for entry in self.applied[start:]:
1377 if entry.node in parents:
1377 if entry.node in parents:
1378 needupdate = True
1378 needupdate = True
1379 break
1379 break
1380 update = needupdate
1380 update = needupdate
1381
1381
1382 tobackup = set()
1382 tobackup = set()
1383 if update:
1383 if update:
1384 m, a, r, d = self.checklocalchanges(repo, force=force or check)
1384 m, a, r, d = self.checklocalchanges(repo, force=force or check)
1385 if force:
1385 if force:
1386 if not nobackup:
1386 if not nobackup:
1387 tobackup.update(m + a)
1387 tobackup.update(m + a)
1388 elif check:
1388 elif check:
1389 tobackup.update(m + a + r + d)
1389 tobackup.update(m + a + r + d)
1390
1390
1391 self.applieddirty = True
1391 self.applieddirty = True
1392 end = len(self.applied)
1392 end = len(self.applied)
1393 rev = self.applied[start].node
1393 rev = self.applied[start].node
1394 if update:
1394 if update:
1395 top = self.checktoppatch(repo)[0]
1395 top = self.checktoppatch(repo)[0]
1396
1396
1397 try:
1397 try:
1398 heads = repo.changelog.heads(rev)
1398 heads = repo.changelog.heads(rev)
1399 except error.LookupError:
1399 except error.LookupError:
1400 node = short(rev)
1400 node = short(rev)
1401 raise util.Abort(_('trying to pop unknown node %s') % node)
1401 raise util.Abort(_('trying to pop unknown node %s') % node)
1402
1402
1403 if heads != [self.applied[-1].node]:
1403 if heads != [self.applied[-1].node]:
1404 raise util.Abort(_("popping would remove a revision not "
1404 raise util.Abort(_("popping would remove a revision not "
1405 "managed by this patch queue"))
1405 "managed by this patch queue"))
1406 if not repo[self.applied[-1].node].mutable():
1406 if not repo[self.applied[-1].node].mutable():
1407 raise util.Abort(
1407 raise util.Abort(
1408 _("popping would remove an immutable revision"),
1408 _("popping would remove an immutable revision"),
1409 hint=_('see "hg help phases" for details'))
1409 hint=_('see "hg help phases" for details'))
1410
1410
1411 # we know there are no local changes, so we can make a simplified
1411 # we know there are no local changes, so we can make a simplified
1412 # form of hg.update.
1412 # form of hg.update.
1413 if update:
1413 if update:
1414 qp = self.qparents(repo, rev)
1414 qp = self.qparents(repo, rev)
1415 ctx = repo[qp]
1415 ctx = repo[qp]
1416 m, a, r, d = repo.status(qp, top)[:4]
1416 m, a, r, d = repo.status(qp, top)[:4]
1417 if d:
1417 if d:
1418 raise util.Abort(_("deletions found between repo revs"))
1418 raise util.Abort(_("deletions found between repo revs"))
1419
1419
1420 tobackup = set(a + m + r) & tobackup
1420 tobackup = set(a + m + r) & tobackup
1421 if check and tobackup:
1421 if check and tobackup:
1422 self.localchangesfound()
1422 self.localchangesfound()
1423 self.backup(repo, tobackup)
1423 self.backup(repo, tobackup)
1424
1424
1425 for f in a:
1425 for f in a:
1426 try:
1426 try:
1427 util.unlinkpath(repo.wjoin(f))
1427 util.unlinkpath(repo.wjoin(f))
1428 except OSError, e:
1428 except OSError, e:
1429 if e.errno != errno.ENOENT:
1429 if e.errno != errno.ENOENT:
1430 raise
1430 raise
1431 repo.dirstate.drop(f)
1431 repo.dirstate.drop(f)
1432 for f in m + r:
1432 for f in m + r:
1433 fctx = ctx[f]
1433 fctx = ctx[f]
1434 repo.wwrite(f, fctx.data(), fctx.flags())
1434 repo.wwrite(f, fctx.data(), fctx.flags())
1435 repo.dirstate.normal(f)
1435 repo.dirstate.normal(f)
1436 repo.setparents(qp, nullid)
1436 repo.setparents(qp, nullid)
1437 for patch in reversed(self.applied[start:end]):
1437 for patch in reversed(self.applied[start:end]):
1438 self.ui.status(_("popping %s\n") % patch.name)
1438 self.ui.status(_("popping %s\n") % patch.name)
1439 del self.applied[start:end]
1439 del self.applied[start:end]
1440 self.strip(repo, [rev], update=False, backup='strip')
1440 self.strip(repo, [rev], update=False, backup='strip')
1441 if self.applied:
1441 if self.applied:
1442 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1442 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1443 else:
1443 else:
1444 self.ui.write(_("patch queue now empty\n"))
1444 self.ui.write(_("patch queue now empty\n"))
1445 finally:
1445 finally:
1446 wlock.release()
1446 wlock.release()
1447
1447
1448 def diff(self, repo, pats, opts):
1448 def diff(self, repo, pats, opts):
1449 top, patch = self.checktoppatch(repo)
1449 top, patch = self.checktoppatch(repo)
1450 if not top:
1450 if not top:
1451 self.ui.write(_("no patches applied\n"))
1451 self.ui.write(_("no patches applied\n"))
1452 return
1452 return
1453 qp = self.qparents(repo, top)
1453 qp = self.qparents(repo, top)
1454 if opts.get('reverse'):
1454 if opts.get('reverse'):
1455 node1, node2 = None, qp
1455 node1, node2 = None, qp
1456 else:
1456 else:
1457 node1, node2 = qp, None
1457 node1, node2 = qp, None
1458 diffopts = self.diffopts(opts, patch)
1458 diffopts = self.diffopts(opts, patch)
1459 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1459 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1460
1460
    def refresh(self, repo, pats=None, **opts):
        """Rewrite the topmost applied patch from the working directory.

        Recomputes the patch file contents, strips the old qtip changeset
        and commits a replacement.  Returns 1 (no patches applied) or None.
        Raises util.Abort when the top patch has children or is immutable.
        """
        if not self.applied:
            self.ui.write(_("no patches applied\n"))
            return 1
        msg = opts.get('msg', '').rstrip()
        newuser = opts.get('user')
        newdate = opts.get('date')
        if newdate:
            newdate = '%d %d' % util.parsedate(newdate)
        wlock = repo.wlock()

        try:
            self.checktoppatch(repo)
            (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
            if repo.changelog.heads(top) != [top]:
                raise util.Abort(_("cannot refresh a revision with children"))
            if not repo[top].mutable():
                raise util.Abort(_("cannot refresh immutable revision"),
                                 hint=_('see "hg help phases" for details'))

            inclsubs = self.checksubstate(repo)

            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)
            ph = patchheader(self.join(patchfn), self.plainmode)
            diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
            if msg:
                ph.setmessage(msg)
            if newuser:
                ph.setuser(newuser)
            if newdate:
                ph.setdate(newdate)
            ph.setparent(hex(patchparent))

            # only commit new patch when write is complete
            patchf = self.opener(patchfn, 'w', atomictemp=True)

            comments = str(ph)
            if comments:
                patchf.write(comments)

            # update the dirstate in place, strip off the qtip commit
            # and then commit.
            #
            # this should really read:
            #   mm, dd, aa = repo.status(top, patchparent)[:3]
            # but we do it backwards to take advantage of manifest/chlog
            # caching against the next repo.status call
            mm, aa, dd = repo.status(patchparent, top)[:3]
            changes = repo.changelog.read(top)
            man = repo.manifest.read(changes[0])
            aaa = aa[:]
            matchfn = scmutil.match(repo[None], pats, opts)
            # in short mode, we only diff the files included in the
            # patch already plus specified files
            if opts.get('short'):
                # if amending a patch, we start with existing
                # files plus specified files - unfiltered
                match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
                # filter with inc/exl options
                matchfn = scmutil.match(repo[None], opts=opts)
            else:
                match = scmutil.matchall(repo)
            m, a, r, d = repo.status(match=match)[:4]
            mm = set(mm)
            aa = set(aa)
            dd = set(dd)

            # we might end up with files that were added between
            # qtip and the dirstate parent, but then changed in the
            # local dirstate. in this case, we want them to only
            # show up in the added section
            for x in m:
                if x not in aa:
                    mm.add(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    dd.remove(x)
                    mm.add(x)
                else:
                    aa.add(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    aa.remove(x)
                    forget.append(x)
                    continue
                else:
                    mm.discard(x)
                dd.add(x)

            m = list(mm)
            r = list(dd)
            a = list(aa)
            # restrict the regenerated patch to the files selected above
            c = [filter(matchfn, l) for l in (m, a, r)]
            match = scmutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
            chunks = patchmod.diff(repo, patchparent, match=match,
                                   changes=c, opts=diffopts)
            for chunk in chunks:
                patchf.write(chunk)

            try:
                if diffopts.git or diffopts.upgrade:
                    copies = {}
                    for dst in a:
                        src = repo.dirstate.copied(dst)
                        # during qfold, the source file for copies may
                        # be removed. Treat this as a simple add.
                        if src is not None and src in repo.dirstate:
                            copies.setdefault(src, []).append(dst)
                        repo.dirstate.add(dst)
                    # remember the copies between patchparent and qtip
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies.setdefault(src[0], []).extend(
                                copies.get(dst, []))
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                    for src, dsts in copies.iteritems():
                        for dst in dsts:
                            repo.dirstate.copy(src, dst)
                else:
                    for dst in a:
                        repo.dirstate.add(dst)
                    # Drop useless copy information
                    for f in list(repo.dirstate.copies()):
                        repo.dirstate.copy(None, f)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m)-1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.drop(f)

                if not msg:
                    if not ph.message:
                        message = "[mq]: %s\n" % patchfn
                    else:
                        message = "\n".join(ph.message)
                else:
                    message = msg

                user = ph.user or changes[1]

                oldphase = repo[top].phase()

                # assumes strip can roll itself back if interrupted
                repo.setparents(*cparents)
                self.applied.pop()
                self.applieddirty = True
                self.strip(repo, [top], update=False,
                           backup='strip')
            except:
                # bare except is deliberate: even KeyboardInterrupt must
                # drop the half-updated dirstate before re-raising
                repo.dirstate.invalidate()
                raise

            try:
                # might be nice to attempt to roll back strip after this

                # Ensure we create a new changeset in the same phase than
                # the old one.
                n = newcommit(repo, oldphase, message, user, ph.date,
                              match=match, force=True)
                # only write patch after a successful commit
                patchf.close()
                self.applied.append(statusentry(n, patchfn))
            except:
                # bare except is deliberate: rebuild state on any failure
                # (including interrupts), warn the user, then re-raise
                ctx = repo[cparents[0]]
                repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                self.savedirty()
                self.ui.warn(_('refresh interrupted while patch was popped! '
                               '(revert --all, qpush to recover)\n'))
                raise
        finally:
            wlock.release()
            self.removeundo(repo)
1656
1657 def init(self, repo, create=False):
1657 def init(self, repo, create=False):
1658 if not create and os.path.isdir(self.path):
1658 if not create and os.path.isdir(self.path):
1659 raise util.Abort(_("patch queue directory already exists"))
1659 raise util.Abort(_("patch queue directory already exists"))
1660 try:
1660 try:
1661 os.mkdir(self.path)
1661 os.mkdir(self.path)
1662 except OSError, inst:
1662 except OSError, inst:
1663 if inst.errno != errno.EEXIST or not create:
1663 if inst.errno != errno.EEXIST or not create:
1664 raise
1664 raise
1665 if create:
1665 if create:
1666 return self.qrepo(create=True)
1666 return self.qrepo(create=True)
1667
1667
1668 def unapplied(self, repo, patch=None):
1668 def unapplied(self, repo, patch=None):
1669 if patch and patch not in self.series:
1669 if patch and patch not in self.series:
1670 raise util.Abort(_("patch %s is not in series file") % patch)
1670 raise util.Abort(_("patch %s is not in series file") % patch)
1671 if not patch:
1671 if not patch:
1672 start = self.seriesend()
1672 start = self.seriesend()
1673 else:
1673 else:
1674 start = self.series.index(patch) + 1
1674 start = self.series.index(patch) + 1
1675 unapplied = []
1675 unapplied = []
1676 for i in xrange(start, len(self.series)):
1676 for i in xrange(start, len(self.series)):
1677 pushable, reason = self.pushable(i)
1677 pushable, reason = self.pushable(i)
1678 if pushable:
1678 if pushable:
1679 unapplied.append((i, self.series[i]))
1679 unapplied.append((i, self.series[i]))
1680 self.explainpushable(i)
1680 self.explainpushable(i)
1681 return unapplied
1681 return unapplied
1682
1682
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print patches from the series file, one per line.

        With missing set, instead list files in the patch directory that
        are not tracked by the series.  start/length select a slice of the
        series; status ('A'/'U'/'G') filters by state in non-verbose mode;
        summary appends the first line of each patch's description.
        """
        def displayname(pfx, patchname, state):
            # one output line: optional prefix, colored name, optional summary
            if pfx:
                self.ui.write(pfx)
            if summary:
                ph = patchheader(self.join(patchname), self.plainmode)
                msg = ph.message and ph.message[0] or ''
                if self.ui.formatted():
                    # truncate the summary to fit the terminal width
                    width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                    if width > 0:
                        msg = util.ellipsis(msg, width)
                    else:
                        msg = ''
                self.ui.write(patchname, label='qseries.' + state)
                self.ui.write(': ')
                self.ui.write(msg, label='qseries.message.' + state)
            else:
                self.ui.write(patchname, label='qseries.' + state)
            self.ui.write('\n')

        applied = set([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            if self.ui.verbose:
                # width of the largest index we will print, for alignment
                idxwidth = len(str(start + length - 1))
            for i in xrange(start, start + length):
                patch = self.series[i]
                if patch in applied:
                    char, state = 'A', 'applied'
                elif self.pushable(i)[0]:
                    char, state = 'U', 'unapplied'
                else:
                    char, state = 'G', 'guarded'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%*d %s ' % (idxwidth, i, char)
                elif status and status != char:
                    # status filter only applies in non-verbose mode
                    continue
                displayname(pfx, patch, state)
        else:
            # walk the patch directory looking for files the series
            # file does not know about
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.statuspath, self.seriespath,
                                   self.guardspath)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            for x in sorted(msng_list):
                pfx = self.ui.verbose and ('D ') or ''
                displayname(pfx, x, 'missing')
1738
1738
1739 def issaveline(self, l):
1739 def issaveline(self, l):
1740 if l.name == '.hg.patches.save.line':
1740 if l.name == '.hg.patches.save.line':
1741 return True
1741 return True
1742
1742
1743 def qrepo(self, create=False):
1743 def qrepo(self, create=False):
1744 ui = self.ui.copy()
1744 ui = self.ui.copy()
1745 ui.setconfig('paths', 'default', '', overlay=False)
1745 ui.setconfig('paths', 'default', '', overlay=False)
1746 ui.setconfig('paths', 'default-push', '', overlay=False)
1746 ui.setconfig('paths', 'default-push', '', overlay=False)
1747 if create or os.path.isdir(self.join(".hg")):
1747 if create or os.path.isdir(self.join(".hg")):
1748 return hg.repository(ui, path=self.path, create=create)
1748 return hg.repository(ui, path=self.path, create=create)
1749
1749
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Restore queue state recorded by qsave in changeset *rev*.

        Parses the saved series/applied/dirstate data out of the changeset
        description.  With delete, also strips the save changeset; with
        qupdate, updates the queue repository to the saved parents.
        Returns 1 on bad or missing saved data, otherwise None.
        """
        desc = repo[rev].description().strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i, line in enumerate(lines):
            if line == 'Patch Data:':
                # everything after this marker is patch state
                datastart = i + 1
            elif line.startswith('Dirstate:'):
                # "Dirstate: <hex-p1> <hex-p2>" -> saved queue repo parents
                l = line.rstrip()
                l = l[10:].split(' ')
                qpp = [bin(x) for x in l]
            elif datastart is not None:
                # "node:name" lines are applied patches; ":name" lines
                # (empty node) are unapplied series entries
                l = line.rstrip()
                n, name = l.split(':', 1)
                if n:
                    applied.append(statusentry(bin(n), name))
                else:
                    series.append(l)
        if datastart is None:
            self.ui.warn(_("No saved patch data found\n"))
            return 1
        self.ui.warn(_("restoring status: %s\n") % lines[0])
        self.fullseries = series
        self.applied = applied
        self.parseseries()
        self.seriesdirty = True
        self.applieddirty = True
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn(_("save entry has children, leaving it alone\n"))
            else:
                self.ui.warn(_("removing save entry %s\n") % short(rev))
                pp = repo.dirstate.parents()
                # only move the working directory if it sat on the save rev
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, [rev], update=update, backup='strip')
        if qpp:
            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                         (short(qpp[0]), short(qpp[1])))
            if qupdate:
                self.ui.status(_("updating queue directory\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn(_("Unable to load queue repository\n"))
                    return 1
                hg.clean(r, qpp[0])
1803
1803
1804 def save(self, repo, msg=None):
1804 def save(self, repo, msg=None):
1805 if not self.applied:
1805 if not self.applied:
1806 self.ui.warn(_("save: no patches applied, exiting\n"))
1806 self.ui.warn(_("save: no patches applied, exiting\n"))
1807 return 1
1807 return 1
1808 if self.issaveline(self.applied[-1]):
1808 if self.issaveline(self.applied[-1]):
1809 self.ui.warn(_("status is already saved\n"))
1809 self.ui.warn(_("status is already saved\n"))
1810 return 1
1810 return 1
1811
1811
1812 if not msg:
1812 if not msg:
1813 msg = _("hg patches saved state")
1813 msg = _("hg patches saved state")
1814 else:
1814 else:
1815 msg = "hg patches: " + msg.rstrip('\r\n')
1815 msg = "hg patches: " + msg.rstrip('\r\n')
1816 r = self.qrepo()
1816 r = self.qrepo()
1817 if r:
1817 if r:
1818 pp = r.dirstate.parents()
1818 pp = r.dirstate.parents()
1819 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1819 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1820 msg += "\n\nPatch Data:\n"
1820 msg += "\n\nPatch Data:\n"
1821 msg += ''.join('%s\n' % x for x in self.applied)
1821 msg += ''.join('%s\n' % x for x in self.applied)
1822 msg += ''.join(':%s\n' % x for x in self.fullseries)
1822 msg += ''.join(':%s\n' % x for x in self.fullseries)
1823 n = repo.commit(msg, force=True)
1823 n = repo.commit(msg, force=True)
1824 if not n:
1824 if not n:
1825 self.ui.warn(_("repo commit failed\n"))
1825 self.ui.warn(_("repo commit failed\n"))
1826 return 1
1826 return 1
1827 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1827 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1828 self.applieddirty = True
1828 self.applieddirty = True
1829 self.removeundo(repo)
1829 self.removeundo(repo)
1830
1830
1831 def fullseriesend(self):
1831 def fullseriesend(self):
1832 if self.applied:
1832 if self.applied:
1833 p = self.applied[-1].name
1833 p = self.applied[-1].name
1834 end = self.findseries(p)
1834 end = self.findseries(p)
1835 if end is None:
1835 if end is None:
1836 return len(self.fullseries)
1836 return len(self.fullseries)
1837 return end + 1
1837 return end + 1
1838 return 0
1838 return 0
1839
1839
1840 def seriesend(self, all_patches=False):
1840 def seriesend(self, all_patches=False):
1841 """If all_patches is False, return the index of the next pushable patch
1841 """If all_patches is False, return the index of the next pushable patch
1842 in the series, or the series length. If all_patches is True, return the
1842 in the series, or the series length. If all_patches is True, return the
1843 index of the first patch past the last applied one.
1843 index of the first patch past the last applied one.
1844 """
1844 """
1845 end = 0
1845 end = 0
1846 def next(start):
1846 def next(start):
1847 if all_patches or start >= len(self.series):
1847 if all_patches or start >= len(self.series):
1848 return start
1848 return start
1849 for i in xrange(start, len(self.series)):
1849 for i in xrange(start, len(self.series)):
1850 p, reason = self.pushable(i)
1850 p, reason = self.pushable(i)
1851 if p:
1851 if p:
1852 return i
1852 return i
1853 self.explainpushable(i)
1853 self.explainpushable(i)
1854 return len(self.series)
1854 return len(self.series)
1855 if self.applied:
1855 if self.applied:
1856 p = self.applied[-1].name
1856 p = self.applied[-1].name
1857 try:
1857 try:
1858 end = self.series.index(p)
1858 end = self.series.index(p)
1859 except ValueError:
1859 except ValueError:
1860 return 0
1860 return 0
1861 return next(end + 1)
1861 return next(end + 1)
1862 return next(end)
1862 return next(end)
1863
1863
1864 def appliedname(self, index):
1864 def appliedname(self, index):
1865 pname = self.applied[index].name
1865 pname = self.applied[index].name
1866 if not self.ui.verbose:
1866 if not self.ui.verbose:
1867 p = pname
1867 p = pname
1868 else:
1868 else:
1869 p = str(self.series.index(pname)) + " " + pname
1869 p = str(self.series.index(pname)) + " " + pname
1870 return p
1870 return p
1871
1871
1872 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1872 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1873 force=None, git=False):
1873 force=None, git=False):
1874 def checkseries(patchname):
1874 def checkseries(patchname):
1875 if patchname in self.series:
1875 if patchname in self.series:
1876 raise util.Abort(_('patch %s is already in the series file')
1876 raise util.Abort(_('patch %s is already in the series file')
1877 % patchname)
1877 % patchname)
1878
1878
1879 if rev:
1879 if rev:
1880 if files:
1880 if files:
1881 raise util.Abort(_('option "-r" not valid when importing '
1881 raise util.Abort(_('option "-r" not valid when importing '
1882 'files'))
1882 'files'))
1883 rev = scmutil.revrange(repo, rev)
1883 rev = scmutil.revrange(repo, rev)
1884 rev.sort(reverse=True)
1884 rev.sort(reverse=True)
1885 if (len(files) > 1 or len(rev) > 1) and patchname:
1885 if (len(files) > 1 or len(rev) > 1) and patchname:
1886 raise util.Abort(_('option "-n" not valid when importing multiple '
1886 raise util.Abort(_('option "-n" not valid when importing multiple '
1887 'patches'))
1887 'patches'))
1888 imported = []
1888 imported = []
1889 if rev:
1889 if rev:
1890 # If mq patches are applied, we can only import revisions
1890 # If mq patches are applied, we can only import revisions
1891 # that form a linear path to qbase.
1891 # that form a linear path to qbase.
1892 # Otherwise, they should form a linear path to a head.
1892 # Otherwise, they should form a linear path to a head.
1893 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1893 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1894 if len(heads) > 1:
1894 if len(heads) > 1:
1895 raise util.Abort(_('revision %d is the root of more than one '
1895 raise util.Abort(_('revision %d is the root of more than one '
1896 'branch') % rev[-1])
1896 'branch') % rev[-1])
1897 if self.applied:
1897 if self.applied:
1898 base = repo.changelog.node(rev[0])
1898 base = repo.changelog.node(rev[0])
1899 if base in [n.node for n in self.applied]:
1899 if base in [n.node for n in self.applied]:
1900 raise util.Abort(_('revision %d is already managed')
1900 raise util.Abort(_('revision %d is already managed')
1901 % rev[0])
1901 % rev[0])
1902 if heads != [self.applied[-1].node]:
1902 if heads != [self.applied[-1].node]:
1903 raise util.Abort(_('revision %d is not the parent of '
1903 raise util.Abort(_('revision %d is not the parent of '
1904 'the queue') % rev[0])
1904 'the queue') % rev[0])
1905 base = repo.changelog.rev(self.applied[0].node)
1905 base = repo.changelog.rev(self.applied[0].node)
1906 lastparent = repo.changelog.parentrevs(base)[0]
1906 lastparent = repo.changelog.parentrevs(base)[0]
1907 else:
1907 else:
1908 if heads != [repo.changelog.node(rev[0])]:
1908 if heads != [repo.changelog.node(rev[0])]:
1909 raise util.Abort(_('revision %d has unmanaged children')
1909 raise util.Abort(_('revision %d has unmanaged children')
1910 % rev[0])
1910 % rev[0])
1911 lastparent = None
1911 lastparent = None
1912
1912
1913 diffopts = self.diffopts({'git': git})
1913 diffopts = self.diffopts({'git': git})
1914 for r in rev:
1914 for r in rev:
1915 if not repo[r].mutable():
1915 if not repo[r].mutable():
1916 raise util.Abort(_('revision %d is not mutable') % r,
1916 raise util.Abort(_('revision %d is not mutable') % r,
1917 hint=_('see "hg help phases" for details'))
1917 hint=_('see "hg help phases" for details'))
1918 p1, p2 = repo.changelog.parentrevs(r)
1918 p1, p2 = repo.changelog.parentrevs(r)
1919 n = repo.changelog.node(r)
1919 n = repo.changelog.node(r)
1920 if p2 != nullrev:
1920 if p2 != nullrev:
1921 raise util.Abort(_('cannot import merge revision %d') % r)
1921 raise util.Abort(_('cannot import merge revision %d') % r)
1922 if lastparent and lastparent != r:
1922 if lastparent and lastparent != r:
1923 raise util.Abort(_('revision %d is not the parent of %d')
1923 raise util.Abort(_('revision %d is not the parent of %d')
1924 % (r, lastparent))
1924 % (r, lastparent))
1925 lastparent = p1
1925 lastparent = p1
1926
1926
1927 if not patchname:
1927 if not patchname:
1928 patchname = normname('%d.diff' % r)
1928 patchname = normname('%d.diff' % r)
1929 checkseries(patchname)
1929 checkseries(patchname)
1930 self.checkpatchname(patchname, force)
1930 self.checkpatchname(patchname, force)
1931 self.fullseries.insert(0, patchname)
1931 self.fullseries.insert(0, patchname)
1932
1932
1933 patchf = self.opener(patchname, "w")
1933 patchf = self.opener(patchname, "w")
1934 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1934 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1935 patchf.close()
1935 patchf.close()
1936
1936
1937 se = statusentry(n, patchname)
1937 se = statusentry(n, patchname)
1938 self.applied.insert(0, se)
1938 self.applied.insert(0, se)
1939
1939
1940 self.added.append(patchname)
1940 self.added.append(patchname)
1941 imported.append(patchname)
1941 imported.append(patchname)
1942 patchname = None
1942 patchname = None
1943 if rev and repo.ui.configbool('mq', 'secret', False):
1943 if rev and repo.ui.configbool('mq', 'secret', False):
1944 # if we added anything with --rev, we must move the secret root
1944 # if we added anything with --rev, we must move the secret root
1945 phases.retractboundary(repo, phases.secret, [n])
1945 phases.retractboundary(repo, phases.secret, [n])
1946 self.parseseries()
1946 self.parseseries()
1947 self.applieddirty = True
1947 self.applieddirty = True
1948 self.seriesdirty = True
1948 self.seriesdirty = True
1949
1949
1950 for i, filename in enumerate(files):
1950 for i, filename in enumerate(files):
1951 if existing:
1951 if existing:
1952 if filename == '-':
1952 if filename == '-':
1953 raise util.Abort(_('-e is incompatible with import from -'))
1953 raise util.Abort(_('-e is incompatible with import from -'))
1954 filename = normname(filename)
1954 filename = normname(filename)
1955 self.checkreservedname(filename)
1955 self.checkreservedname(filename)
1956 originpath = self.join(filename)
1956 originpath = self.join(filename)
1957 if not os.path.isfile(originpath):
1957 if not os.path.isfile(originpath):
1958 raise util.Abort(_("patch %s does not exist") % filename)
1958 raise util.Abort(_("patch %s does not exist") % filename)
1959
1959
1960 if patchname:
1960 if patchname:
1961 self.checkpatchname(patchname, force)
1961 self.checkpatchname(patchname, force)
1962
1962
1963 self.ui.write(_('renaming %s to %s\n')
1963 self.ui.write(_('renaming %s to %s\n')
1964 % (filename, patchname))
1964 % (filename, patchname))
1965 util.rename(originpath, self.join(patchname))
1965 util.rename(originpath, self.join(patchname))
1966 else:
1966 else:
1967 patchname = filename
1967 patchname = filename
1968
1968
1969 else:
1969 else:
1970 if filename == '-' and not patchname:
1970 if filename == '-' and not patchname:
1971 raise util.Abort(_('need --name to import a patch from -'))
1971 raise util.Abort(_('need --name to import a patch from -'))
1972 elif not patchname:
1972 elif not patchname:
1973 patchname = normname(os.path.basename(filename.rstrip('/')))
1973 patchname = normname(os.path.basename(filename.rstrip('/')))
1974 self.checkpatchname(patchname, force)
1974 self.checkpatchname(patchname, force)
1975 try:
1975 try:
1976 if filename == '-':
1976 if filename == '-':
1977 text = self.ui.fin.read()
1977 text = self.ui.fin.read()
1978 else:
1978 else:
1979 fp = url.open(self.ui, filename)
1979 fp = url.open(self.ui, filename)
1980 text = fp.read()
1980 text = fp.read()
1981 fp.close()
1981 fp.close()
1982 except (OSError, IOError):
1982 except (OSError, IOError):
1983 raise util.Abort(_("unable to read file %s") % filename)
1983 raise util.Abort(_("unable to read file %s") % filename)
1984 patchf = self.opener(patchname, "w")
1984 patchf = self.opener(patchname, "w")
1985 patchf.write(text)
1985 patchf.write(text)
1986 patchf.close()
1986 patchf.close()
1987 if not force:
1987 if not force:
1988 checkseries(patchname)
1988 checkseries(patchname)
1989 if patchname not in self.series:
1989 if patchname not in self.series:
1990 index = self.fullseriesend() + i
1990 index = self.fullseriesend() + i
1991 self.fullseries[index:index] = [patchname]
1991 self.fullseries[index:index] = [patchname]
1992 self.parseseries()
1992 self.parseseries()
1993 self.seriesdirty = True
1993 self.seriesdirty = True
1994 self.ui.warn(_("adding %s to series file\n") % patchname)
1994 self.ui.warn(_("adding %s to series file\n") % patchname)
1995 self.added.append(patchname)
1995 self.added.append(patchname)
1996 imported.append(patchname)
1996 imported.append(patchname)
1997 patchname = None
1997 patchname = None
1998
1998
1999 self.removeundo(repo)
1999 self.removeundo(repo)
2000 return imported
2000 return imported
2001
2001
def fixcheckopts(ui, opts):
    """Force the 'check' option on when the mq.check config is set.

    Returns the opts mapping unchanged unless mq.check is configured
    and neither --force nor --exact was given; in that case a copy of
    opts with 'check' enabled is returned (the caller's dict is never
    mutated).
    """
    if not ui.configbool('mq', 'check'):
        return opts
    if opts.get('force') or opts.get('exact'):
        # explicit override flags win over the config knob
        return opts
    newopts = dict(opts)
    newopts['check'] = True
    return newopts
2009
2009
@command("qdelete|qremove|qrm",
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [],
           _('stop managing a revision (DEPRECATED)'), _('REV'))],
         _('hg qdelete [-k] [PATCH]...'))
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # delegate to the queue object, then persist its state files
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.savedirty()
    return 0
2028
2028
@command("qapplied",
         [('1', 'last', None, _('show only the preceding applied patch'))
          ] + seriesopts,
         _('hg qapplied [-1] [-s] [PATCH]'))
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    mq = repo.mq

    # determine how far into the series we should list
    if patch:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    else:
        end = mq.seriesend(True)

    start = 0
    if opts.get('last'):
        # -1/--last: show only the patch before the topmost one
        if not end:
            ui.write(_("no patches applied\n"))
            return 1
        if end == 1:
            ui.write(_("only one patch applied\n"))
            return 1
        start = end - 2
        end = 1

    mq.qseries(repo, length=end, start=start, status='A',
               summary=opts.get('summary'))
2061
2061
2062
2062
@command("qunapplied",
         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
         _('hg qunapplied [-1] [-s] [PATCH]'))
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    mq = repo.mq
    # first unapplied entry: either just after the named patch, or just
    # after the last applied one
    if patch:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = mq.series.index(patch) + 1
    else:
        start = mq.seriesend(True)

    if start == len(mq.series) and opts.get('first'):
        ui.write(_("all patches applied\n"))
        return 1

    # -1/--first limits output to a single entry
    length = None
    if opts.get('first'):
        length = 1
    mq.qseries(repo, start=start, length=length, status='U',
               summary=opts.get('summary'))
2086
2086
@command("qimport",
         [('e', 'existing', None, _('import file in patch directory')),
          ('n', 'name', '',
           _('name of patch file'), _('NAME')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [],
           _('place existing revisions under mq control'), _('REV')),
          ('g', 'git', None, _('use git extended diff format')),
          ('P', 'push', None, _('qpush after importing'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...'))
def qimport(ui, repo, *filename, **opts):
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With -g/--git, patches imported with --rev will use the git diff
    format. See the diffs help topic for information on why this is
    important for preserving rename/copy information and permission
    changes. Use :hg:`qfinish` to remove changesets from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    # take the repo lock up front: importing with --rev may retract the
    # phase boundary (see queue.qimport), which requires the lock
    lock = repo.lock() # cause this may move phase
    try:
        q = repo.mq
        try:
            imported = q.qimport(
                repo, filename, patchname=opts.get('name'),
                existing=opts.get('existing'), force=opts.get('force'),
                rev=opts.get('rev'), git=opts.get('git'))
        finally:
            # persist series/status even when qimport aborted partway
            q.savedirty()

        # -P/--push: push the last imported patch (not meaningful with
        # --rev, since those changesets are already applied)
        if imported and opts.get('push') and not opts.get('rev'):
            return q.push(repo, imported[-1])
    finally:
        lock.release()
    return 0
2147
2147
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    mq = repo.mq
    qrepo = mq.init(repo, create)
    mq.savedirty()
    if not qrepo:
        # unversioned queue: nothing more to set up
        return 0
    if not os.path.exists(qrepo.wjoin('.hgignore')):
        # status and guards hold transient state and must not be tracked
        ignorefp = qrepo.wopener('.hgignore', 'w')
        ignorefp.write('^\\.hg\n'
                       '^\\.mq\n'
                       'syntax: glob\n'
                       'status\n'
                       'guards\n')
        ignorefp.close()
    if not os.path.exists(qrepo.wjoin('series')):
        qrepo.wopener('series', 'w').close()
    qrepo[None].add(['.hgignore', 'series'])
    commands.add(ui, qrepo)
    return 0
2173
2173
@command("^qinit",
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]'))
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # thin deprecated wrapper around the shared qinit helper
    createrepo = opts.get('create_repo')
    return qinit(ui, repo, create=createrepo)
2189
2189
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
          ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'))
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))

    # patches repo (source only) -- abort early if there is nothing to clone
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    try:
        hg.repository(ui, patchespath)
    except error.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))
    # qbase: first mq-managed changeset in the source, if discoverable
    qbase, destrev = None, None
    if sr.local():
        # NOTE(review): qbase is still None on the next line, so sr[qbase]
        # is sr[None] (the working-directory context) rather than the first
        # applied patch -- confirm this phase check targets the right ctx.
        if sr.mq.applied and sr[qbase].phase() != phases.secret:
            qbase = sr.mq.applied[0].node
            if not hg.islocal(dest):
                # pulling to a remote dest: pull everything except the
                # patch changesets, plus the parent of the first patch
                heads = set(sr.heads())
                destrev = list(heads.difference(sr.heads(qbase)))
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # remote source: best-effort lookup of the qbase bookmark/tag
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        if qbase:
            # remove any applied-patch changesets that made it into dest
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            dr.mq.strip(dr, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(dr, dr.changelog.tip())
2273
2273
@command("qcommit|qci",
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'))
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    # commit into the nested patch repository, not the main repo
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise util.Abort('no queue repository')
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
2286
2286
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
          ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    # qseries with no start/length bounds prints the whole series
    mq = repo.mq
    mq.qseries(repo, missing=opts.get('missing'),
               summary=opts.get('summary'))
    return 0
2298
2298
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    mq = repo.mq
    # position just past the last applied patch; 0 when nothing is applied
    pos = 0
    if mq.applied:
        pos = mq.seriesend(True)
    if not pos:
        ui.write(_("no patches applied\n"))
        return 1
    mq.qseries(repo, start=pos - 1, length=1, status='A',
               summary=opts.get('summary'))
2312
2312
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    mq = repo.mq
    pos = mq.seriesend()
    if pos == len(mq.series):
        # nothing left in the series to push
        ui.write(_("all patches applied\n"))
        return 1
    mq.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
2324
2324
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    mq = repo.mq
    napplied = len(mq.applied)
    # need at least two applied patches for a "preceding" one to exist
    if not napplied:
        ui.write(_("no patches applied\n"))
        return 1
    if napplied == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    previous = mq.applied[-2].name
    mq.qseries(repo, start=mq.series.index(previous), length=1, status='A',
               summary=opts.get('summary'))
2341
2341
def setupheaderopts(ui, opts):
    """Resolve -U/--currentuser and -D/--currentdate into opts.

    Fills opts['user'] with the configured username and opts['date']
    with the current date string, but only when the corresponding
    "current" flag is set and no explicit value was given.
    """
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % util.makedate()
2347
2347
@command("^qnew",
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'))
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    msg = cmdutil.logmessage(ui, opts)
    def getmsg():
        # deferred so the editor only runs when queue.new asks for the text
        return ui.edit(msg, opts.get('user') or ui.username())
    q = repo.mq
    # fix: the original assigned opts['msg'] = msg here as well, a dead
    # store immediately overwritten by both branches below
    if opts.get('edit'):
        # pass the callable; queue.new invokes it to obtain the message
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2398
2398
@command("^qrefresh",
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'))
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    if opts.get('edit'):
        # --edit needs an applied patch to seed the editor with, and is
        # mutually exclusive with an explicit message from -m/-l.
        if not q.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        ph = patchheader(q.join(patch), q.plainmode)
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
        # We don't want to lose the patch message if qrefresh fails (issue2062)
        repo.savecommitmessage(message)
    setupheaderopts(ui, opts)
    # The actual refresh mutates the working directory and the patch file,
    # so it runs under the working-directory lock.
    wlock = repo.wlock()
    try:
        ret = q.refresh(repo, pats, msg=message, **opts)
        q.savedirty()
        return ret
    finally:
        wlock.release()
2456
2456
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    # Delegate entirely to the queue object; it writes the diff to ui.
    q = repo.mq
    q.diff(repo, pats, opts)
    return 0
2477
2477
@command('qfold',
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)
    if opts.get('edit'):
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s\n') % p)
            # Actually skip the duplicate: without this, the patch was
            # appended anyway and folded (and later deleted) twice.
            continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s')
                             % p)
        patches.append(p)

    # Apply each patch on top of the current one, collecting their
    # commit messages for the combined header.
    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)

    if not message:
        # No -m/-l: concatenate the current header with each folded
        # patch's message, separated by '* * *' lines.
        ph = patchheader(q.join(parent), q.plainmode)
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts.get('edit'):
        message = ui.edit(message, user or ui.username())

    diffopts = q.patchopts(q.diffopts(), *patches)
    # Refreshing the cumulative patch and deleting the folded ones
    # touches the working directory, so hold the wlock.
    wlock = repo.wlock()
    try:
        q.refresh(repo, msg=message, git=diffopts.git)
        q.delete(repo, patches, opts)
        q.savedirty()
    finally:
        wlock.release()
2550
2550
@command("qgoto",
         [('c', 'check', None, _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixcheckopts(ui, opts)
    q = repo.mq
    target = q.lookup(patch)
    force = opts.get('force')
    nobackup = opts.get('no_backup')
    check = opts.get('check')
    # If the target is already applied we pop down to it, otherwise we
    # push up to it; both take the same keyword arguments.
    if q.isapplied(target):
        op = q.pop
    else:
        op = q.push
    ret = op(repo, target, force=force, nobackup=nobackup, check=check)
    q.savedirty()
    return ret
2573
2573
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::
       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # Print one series line: "<patch>: <guards...>", with each part
        # labeled for color output according to the patch's state.
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        # -l/--list shows every patch and is exclusive with everything else.
        if args or opts.get('none'):
            raise util.Abort(_('cannot mix -l/--list with options or '
                               'arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # No patch name given (or first arg is a +/- guard): default to the
    # topmost applied patch.
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        # Remaining args are the new guards (-n/--none sets an empty list).
        idx = q.findseries(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        status(q.series.index(q.lookup(patch)))
2647
2647
@command("qheader", [], _('hg qheader [PATCH]'))
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    # Default to the topmost applied patch when none was named.
    if not patch:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = 'qtip'
    name = q.lookup(patch)
    ph = patchheader(q.join(name), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')
2665
2665
def lastsavename(path):
    """Find the most recent saved copy of the queue directory *path*.

    Saved queues live next to *path* and are named "<base>.<N>" for a
    decimal N (see savename). Returns a (fullpath, index) pair for the
    entry with the highest N, or (None, None) if none exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # Escape the base name and anchor the pattern: previously the '.'
    # matched any character and trailing text was accepted, so unrelated
    # entries like '<base>.12.bak' or '<base>X3' could be picked up.
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2682
2682
def savename(path):
    """Return the name the next saved copy of queue *path* should get."""
    last, index = lastsavename(path)
    if last is None:
        # No previous save: numbering starts at "<path>.1".
        index = 0
    return path + ".%d" % (index + 1)
2689
2689
@command("^qpush",
         [('c', 'check', None, _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With -c/--check, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    opts = fixcheckopts(ui, opts)
    if opts.get('merge'):
        # Deprecated -m/--merge path: push while merging with a saved
        # queue, located either by -n/--name or as the most recent save.
        if opts.get('name'):
            newpath = repo.join(opts.get('name'))
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.path, newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                 check=opts.get('check'))
    return ret
2733
2733
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('c', 'check', None, _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With -c/--check, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    opts = fixcheckopts(ui, opts)
    name = opts.get('name')
    if name:
        # Deprecated -n/--name: pop from a named queue without touching
        # the working directory.
        q = queue(ui, repo.path, repo.join(name))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                check=opts.get('check'))
    q.savedirty()
    return ret
2769
2769
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    if not name:
        # Single-argument form: the argument is the new name and the
        # current (topmost applied) patch is the one being renamed.
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # Renaming into an existing directory keeps the old basename.
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    # Rewrite the series entry, preserving any '#guard' annotations.
    i = q.findseries(patch)
    guards = q.guard_re.findall(q.fullseries[i])
    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    # If the patch is applied, update its status entry as well.
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    # Mirror the rename in the versioned patch queue repo, if any.
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                # Never committed: just swap the added filename.
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                # Record as a copy + forget so history follows the rename.
                wctx.copy(patch, name)
                wctx.forget([patch])
        finally:
            wlock.release()

    q.savedirty()
2827
2827
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'))
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    # Resolve the revision first, then hand the whole operation to the
    # queue object.
    node = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, node, delete=opts.get('delete'),
              qupdate=opts.get('update'))
    q.savedirty()
    return 0
2842
2842
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        # -c/--copy: duplicate the patch directory, either under the
        # explicit -n/--name or under the next auto-numbered save name.
        path = q.path
        if opts.get('name'):
            newpath = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts.get('force'):
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts.get('empty'):
        # -e/--empty: clear the applied-patch status after saving.
        del q.applied[:]
        q.applieddirty = True
        q.savedirty()
    return 0
2880
2880
@command("strip",
         [
          ('r', 'rev', [], _('strip specified revision (optional, '
                             'can specify revisions without this '
                             'option)'), _('REV')),
          ('f', 'force', None, _('force removal of changesets, discard '
                                 'uncommitted changes (no backup)')),
          ('b', 'backup', None, _('bundle only changesets with local revision'
                                  ' number greater than REV which are not'
                                  ' descendants of REV (DEPRECATED)')),
          ('', 'no-backup', None, _('no backups')),
          ('', 'nobackup', None, _('no backups (DEPRECATED)')),
          ('n', '', None, _('ignored (DEPRECATED)')),
          ('k', 'keep', None, _("do not modify working copy during strip"))],
         _('hg strip [-k] [-f] [-n] REV...'))
def strip(ui, repo, *revs, **opts):
    """strip changesets and all their descendants from the repository

    The strip command removes the specified changesets and all their
    descendants. If the working directory has uncommitted changes, the
    operation is aborted unless the --force flag is supplied, in which
    case changes will be discarded.

    If a parent of the working directory is stripped, then the working
    directory will automatically be updated to the most recent
    available ancestor of the stripped parent after the operation
    completes.

    Any stripped changesets are stored in ``.hg/strip-backup`` as a
    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
    where BUNDLE is the bundle file created by the strip. Note that
    the local revision numbers will in general be different after the
    restore.

    Use the --no-backup option to discard the backup bundle once the
    operation completes.

    Return 0 on success.
    """
    # Translate the command-line flags into the backup-mode string that is
    # passed through to repo.mq.strip() below ('all' is the default).
    backup = 'all'
    if opts.get('backup'):
        backup = 'strip'
    elif opts.get('no_backup') or opts.get('nobackup'):
        backup = 'none'

    cl = repo.changelog
    # positional REV arguments and -r/--rev values are combined
    revs = list(revs) + opts.get('rev')
    revs = set(scmutil.revrange(repo, revs))
    if not revs:
        raise util.Abort(_('empty revision set'))

    # everything that goes away = requested revs plus all their descendants;
    # roots are the requested revs that are not descendants of another one
    descendants = set(cl.descendants(*revs))
    strippedrevs = revs.union(descendants)
    roots = revs.difference(descendants)

    update = False
    # if one of the wdir parent is stripped we'll need
    # to update away to an earlier revision
    for p in repo.dirstate.parents():
        if p != nullid and cl.rev(p) in strippedrevs:
            update = True
            break

    # mq bookkeeping below works on nodes, not revision numbers
    rootnodes = set(cl.node(r) for r in roots)

    q = repo.mq
    if q.applied:
        # refresh queue state if we're about to strip
        # applied patches
        if cl.rev(repo.lookup('qtip')) in strippedrevs:
            q.applieddirty = True
            start = 0
            end = len(q.applied)
            for i, statusentry in enumerate(q.applied):
                if statusentry.node in rootnodes:
                    # if one of the stripped roots is an applied
                    # patch, only part of the queue is stripped
                    start = i
                    break
            # drop the stripped tail of the applied-patch list and persist
            del q.applied[start:end]
            q.savedirty()

    revs = list(rootnodes)
    if update and opts.get('keep'):
        # -k/--keep: move the dirstate parent off the stripped changesets
        # without touching the working copy files; the wlock guards the
        # dirstate rebuild
        wlock = repo.wlock()
        try:
            urev = repo.mq.qparents(repo, revs[0])
            repo.dirstate.rebuild(urev, repo[urev].manifest())
            repo.dirstate.write()
            # dirstate already points to a surviving rev; no update needed
            update = False
        finally:
            wlock.release()

    repo.mq.strip(repo, revs, backup=backup, update=update,
                  force=opts.get('force'))
    return 0
2978
2978
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'))
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable    (negative guard)
        qguard bar.patch    +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    guards = q.active()
    if args or opts.get('none'):
        # Setting (or with --none clearing) the active guards.  Snapshot the
        # unapplied/guarded state first so we can report how it changed.
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            # no pop/reapply requested: just report changes in push/pop state
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: print a usage count for every guard appearing in the
        # series file ('NONE' counts unguarded patches, verbose mode only)
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # no arguments: report the currently active guards
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the topmost applied patch so --reapply can push back to it
    reapply = opts.get('reapply') and q.applied and q.appliedname(-1)
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # pop back to just before the first applied patch that is now guarded
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, str(i - 1))
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # always persist queue state, even if the push above fails
            q.savedirty()
3089
3089
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'))
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    # -a/--applied selects the whole applied stack; otherwise at least one
    # explicit revision is required
    if opts.get('applied'):
        revrange = ('qbase::qtip',) + revrange
    elif not revrange:
        raise util.Abort(_('no revisions specified'))

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may change phases, but it leaves the responsibility of
    # locking the repo to the caller (to avoid a deadlock with wlock), so
    # this command takes the lock itself.
    lock = repo.lock()
    try:
        q.finish(repo, revs)
        q.savedirty()
    finally:
        lock.release()
    return 0
3134
3134
@command("qqueue",
         [('l', 'list', False, _('list all available queues')),
          ('', 'active', False, _('print name of active queue')),
          ('c', 'create', False, _('create new queue')),
          ('', 'rename', False, _('rename active queue')),
          ('', 'delete', False, _('delete reference to queue')),
          ('', 'purge', False, _('delete queue, and remove patch dir')),
         ],
         _('[OPTION] [QUEUE]'))
def qqueue(ui, repo, name=None, **opts):
    '''manage multiple patch queues

    Supports switching between different patch queues, as well as creating
    new patch queues and deleting existing ones.

    Omitting a queue name or specifying -l/--list will show you the registered
    queues - by default the "normal" patches queue is registered. The currently
    active queue will be marked with "(active)". Specifying --active will print
    only the name of the active queue.

    To create a new queue, use -c/--create. The queue is automatically made
    active, except in the case where there are applied patches from the
    currently active queue in the repository. Then the queue will only be
    created and switching will fail.

    To delete an existing queue, use --delete. You cannot delete the currently
    active queue.

    Returns 0 on success.
    '''
    q = repo.mq
    # bookkeeping files inside .hg: the default queue name, the registry of
    # all queues, and the marker naming the currently active queue
    _defaultqueue = 'patches'
    _allqueues = 'patches.queues'
    _activequeue = 'patches.queue'

    def _getcurrent():
        # derive the active queue name from the queue directory name
        # ('patches' or 'patches-<name>')
        cur = os.path.basename(q.path)
        if cur.startswith('patches-'):
            cur = cur[8:]
        return cur

    def _noqueues():
        # True when the queue registry file does not exist yet
        try:
            fh = repo.opener(_allqueues, 'r')
            fh.close()
        except IOError:
            return True

        return False

    def _getqueues():
        # read the registry; always include the active queue, and fall back
        # to the default when the registry file is missing
        current = _getcurrent()

        try:
            fh = repo.opener(_allqueues, 'r')
            queues = [queue.strip() for queue in fh if queue.strip()]
            fh.close()
            if current not in queues:
                queues.append(current)
        except IOError:
            queues = [_defaultqueue]

        return sorted(queues)

    def _setactive(name):
        # refuse to switch queues while patches are applied
        if q.applied:
            raise util.Abort(_('patches applied - cannot set new queue active'))
        _setactivenocheck(name)

    def _setactivenocheck(name):
        # record the active queue; the default queue is represented by an
        # empty marker file
        fh = repo.opener(_activequeue, 'w')
        if name != 'patches':
            fh.write(name)
        fh.close()

    def _addqueue(name):
        # append a queue name to the registry file
        fh = repo.opener(_allqueues, 'a')
        fh.write('%s\n' % (name,))
        fh.close()

    def _queuedir(name):
        # map a queue name to its on-disk patch directory
        if name == 'patches':
            return repo.join('patches')
        else:
            return repo.join('patches-' + name)

    def _validname(name):
        # queue names may not contain path or guard separator characters
        for n in name:
            if n in ':\\/.':
                return False
        return True

    def _delete(name):
        # remove a queue from the registry (the active queue is protected);
        # the registry is rewritten atomically via a temp file + rename
        if name not in existing:
            raise util.Abort(_('cannot delete queue that does not exist'))

        current = _getcurrent()

        if name == current:
            raise util.Abort(_('cannot delete currently active queue'))

        fh = repo.opener('patches.queues.new', 'w')
        for queue in existing:
            if queue == name:
                continue
            fh.write('%s\n' % (queue,))
        fh.close()
        util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))

    # no name (or an explicit listing flag): print queue information
    if not name or opts.get('list') or opts.get('active'):
        current = _getcurrent()
        if opts.get('active'):
            ui.write('%s\n' % (current,))
            return
        for queue in _getqueues():
            ui.write('%s' % (queue,))
            if queue == current and not ui.quiet:
                ui.write(_(' (active)\n'))
            else:
                ui.write('\n')
        return

    if not _validname(name):
        raise util.Abort(
            _('invalid queue name, may not contain the characters ":\\/."'))

    existing = _getqueues()

    if opts.get('create'):
        if name in existing:
            raise util.Abort(_('queue "%s" already exists') % name)
        # bootstrap the registry with the default queue on first use
        if _noqueues():
            _addqueue(_defaultqueue)
        _addqueue(name)
        _setactive(name)
    elif opts.get('rename'):
        # rename the *active* queue to `name`: rewrite the registry entry and
        # move the patch directory, then point the active marker at it
        current = _getcurrent()
        if name == current:
            raise util.Abort(_('can\'t rename "%s" to its current name') % name)
        if name in existing:
            raise util.Abort(_('queue "%s" already exists') % name)

        olddir = _queuedir(current)
        newdir = _queuedir(name)

        if os.path.exists(newdir):
            raise util.Abort(_('non-queue directory "%s" already exists') %
                             newdir)

        fh = repo.opener('patches.queues.new', 'w')
        for queue in existing:
            if queue == current:
                fh.write('%s\n' % (name,))
                if os.path.exists(olddir):
                    util.rename(olddir, newdir)
            else:
                fh.write('%s\n' % (queue,))
        fh.close()
        util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
        _setactivenocheck(name)
    elif opts.get('delete'):
        _delete(name)
    elif opts.get('purge'):
        # --purge also removes the patch directory itself
        if name in existing:
            _delete(name)
        qdir = _queuedir(name)
        if os.path.exists(qdir):
            shutil.rmtree(qdir)
    else:
        # bare name: switch to an existing queue
        if name not in existing:
            raise util.Abort(_('use --create to create a new queue'))
        _setactive(name)
3307
3307
def mqphasedefaults(repo, roots):
    """callback used to set mq changeset as secret when no phase data exists"""
    applied = repo.mq.applied
    if not applied:
        # nothing applied: leave the phase roots untouched
        return roots
    # mq changesets default to secret when mq.secret is set, draft otherwise
    if repo.ui.configbool('mq', 'secret', False):
        mqphase = phases.secret
    else:
        mqphase = phases.draft
    # qbase (the first applied patch) becomes the phase root for the stack
    qbase = repo[applied[0].node]
    roots[mqphase].add(qbase.node())
    return roots
3318
3318
def reposetup(ui, repo):
    """Replace the repo's class with an mq-aware subclass.

    Called by the extension machinery for every repository; only local
    repositories are actually patched (see the trailing ``repo.local()``
    check).
    """
    class mqrepo(repo.__class__):
        @util.propertycache
        def mq(self):
            # Lazily create the patch queue object; propertycache makes
            # this a one-time computation per repo instance.
            return queue(self.ui, self.path)

        def abortifwdirpatched(self, errmsg, force=False):
            """Abort with errmsg if the working directory sits on an
            applied mq patch (unless force is set)."""
            if self.mq.applied and not force:
                parents = self.dirstate.parents()
                patches = [s.node for s in self.mq.applied]
                if parents[0] in patches or parents[1] in patches:
                    raise util.Abort(errmsg)

        def commit(self, text="", user=None, date=None, match=None,
                   force=False, editor=False, extra={}):
            # Refuse a plain commit on top of an applied patch; qrefresh
            # should be used instead.
            self.abortifwdirpatched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(text, user, date, match, force,
                                              editor, extra)

        def checkpush(self, force, revs):
            # Pushing applied patches would publish them; abort unless the
            # outgoing set contains no non-secret applied patches.
            if self.mq.applied and not force:
                outapplied = [e.node for e in self.mq.applied]
                if revs:
                    # Assume applied patches have no non-patch descendants and
                    # are not on remote already. Filtering any changeset not
                    # pushed.
                    heads = set(revs)
                    for node in reversed(outapplied):
                        if node in heads:
                            break
                        else:
                            outapplied.pop()
                # looking for pushed and shared changeset
                for node in outapplied:
                    if repo[node].phase() < phases.secret:
                        raise util.Abort(_('source has mq patches applied'))
                # no non-secret patches pushed
            super(mqrepo, self).checkpush(force, revs)

        def _findtags(self):
            '''augment tags from base class with patch tags'''
            result = super(mqrepo, self)._findtags()

            q = self.mq
            if not q.applied:
                return result

            mqtags = [(patch.node, patch.name) for patch in q.applied]

            try:
                # validate that the status file still matches the changelog
                self.changelog.rev(mqtags[-1][0])
            except error.LookupError:
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(mqtags[-1][0]))
                return result

            # synthesize the qtip/qbase/qparent convenience tags
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            tags = result[0]
            for patch in mqtags:
                if patch[1] in tags:
                    # real tags win over patch names of the same spelling
                    self.ui.warn(_('Tag %s overrides mq patch of the same '
                                   'name\n') % patch[1])
                else:
                    tags[patch[1]] = patch[0]

            return result

        def _branchtags(self, partial, lrev):
            # Like the base implementation, but never persist cache entries
            # that cover mq patches (they are transient history).
            q = self.mq
            cl = self.changelog
            qbase = None
            if not q.applied:
                if getattr(self, '_committingpatch', False):
                    # Committing a new patch, must be tip
                    qbase = len(cl) - 1
            else:
                qbasenode = q.applied[0].node
                try:
                    qbase = cl.rev(qbasenode)
                except error.LookupError:
                    self.ui.warn(_('mq status file refers to unknown node %s\n')
                                 % short(qbasenode))
            if qbase is None:
                return super(mqrepo, self)._branchtags(partial, lrev)

            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
                self._updatebranchcache(partial, ctxgen)
                self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            ctxgen = (self[r] for r in xrange(start, len(cl)))
            self._updatebranchcache(partial, ctxgen)

            return partial

    if repo.local():
        repo.__class__ = mqrepo

        repo._phasedefaults.append(mqphasedefaults)
3430
3430
def mqimport(orig, ui, repo, *args, **kwargs):
    """Wrapper for 'hg import': refuse to import over an applied mq patch.

    Falls through to the original command when the repo is not mq-enabled
    or when --no-commit is given.
    """
    mqaware = util.safehasattr(repo, 'abortifwdirpatched')
    if mqaware and not kwargs.get('no_commit', False):
        repo.abortifwdirpatched(_('cannot import over an applied patch'),
                                kwargs.get('force'))
    return orig(ui, repo, *args, **kwargs)
3437
3437
def mqinit(orig, ui, *args, **kwargs):
    """Wrapper for 'hg init': with --mq, initialize the patch queue
    repository of the enclosing repository instead of a new repo."""
    mq = kwargs.pop('mq', None)

    if not mq:
        return orig(ui, *args, **kwargs)

    if not args:
        # no path given: locate the enclosing repository from the cwd
        repopath = cmdutil.findrepo(os.getcwd())
        if not repopath:
            raise util.Abort(_('there is no Mercurial repository here '
                               '(.hg not found)'))
    else:
        repopath = args[0]
        if not hg.islocal(repopath):
            raise util.Abort(_('only a local queue repository '
                               'may be initialized'))
    repo = hg.repository(ui, repopath)
    return qinit(ui, repo, True)
3456
3456
def mqcommand(orig, ui, repo, *args, **kwargs):
    """Add --mq option to operate on patch repository instead of main"""

    # some commands do not like getting unknown options
    mq = kwargs.pop('mq', None)

    if not mq:
        return orig(ui, repo, *args, **kwargs)

    # redirect the command at the queue repository
    queuerepo = repo.mq.qrepo()
    if not queuerepo:
        raise util.Abort(_('no queue repository'))
    return orig(queuerepo.ui, queuerepo, *args, **kwargs)
3471
3471
def summary(orig, ui, repo, *args, **kwargs):
    """Wrapper for 'hg summary': append an 'mq:' line describing how many
    patches are applied/unapplied."""
    result = orig(ui, repo, *args, **kwargs)
    q = repo.mq
    applied = len(q.applied)
    unapplied = len(q.unapplied(repo))
    parts = []
    if applied:
        parts.append(ui.label(_("%d applied"), 'qseries.applied') % applied)
    if unapplied:
        parts.append(ui.label(_("%d unapplied"), 'qseries.unapplied')
                     % unapplied)
    if parts:
        ui.write("mq: %s\n" % ', '.join(parts))
    else:
        # only shown in verbose mode
        ui.note(_("mq: (empty queue)\n"))
    return result
3486
3486
def revsetmq(repo, subset, x):
    """``mq()``
    Changesets managed by MQ.
    """
    # mq() accepts no arguments; getargs aborts otherwise
    revset.getargs(x, 0, 0, _("mq takes no arguments"))
    applied = set()
    for entry in repo.mq.applied:
        applied.add(repo[entry.node].rev())
    return [r for r in subset if r in applied]
3494
3494
def extsetup(ui):
    # register the mq() revset predicate once all extensions are loaded
    revset.symbols['mq'] = revsetmq

# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsetmq]
3500
3500
def uisetup(ui):
    # the extra flag added to (almost) every command table entry below
    mqopt = [('', 'mq', None, _("operate on patch repository"))]

    # commands needing special-cased mq behaviour
    extensions.wrapcommand(commands.table, 'import', mqimport)
    extensions.wrapcommand(commands.table, 'summary', summary)

    entry = extensions.wrapcommand(commands.table, 'init', mqinit)
    entry[1].extend(mqopt)

    # commands that run without a repository cannot take --mq
    nowrap = set(commands.norepo.split(" "))

    def dotable(cmdtable):
        # wrap every repo-using command in cmdtable with the --mq handler
        for cmd in cmdtable.keys():
            cmd = cmdutil.parsealiases(cmd)[0]
            if cmd in nowrap:
                continue
            entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
            entry[1].extend(mqopt)

    dotable(commands.table)

    # also wrap commands contributed by other loaded extensions
    for extname, extmodule in extensions.extensions():
        if extmodule.__file__ != __file__:
            dotable(getattr(extmodule, 'cmdtable', {}))
3525
3525
3526
3526
# color effects used by the color extension for qseries/qguard output
colortable = {
    'qguard.negative': 'red',
    'qguard.positive': 'yellow',
    'qguard.unguarded': 'green',
    'qseries.applied': 'blue bold underline',
    'qseries.guarded': 'black bold',
    'qseries.missing': 'red bold',
    'qseries.unapplied': 'black bold',
}
@@ -1,557 +1,557 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46 '''
46 '''
47
47
48 import os, errno, socket, tempfile, cStringIO
48 import os, errno, socket, tempfile, cStringIO
49 import email.MIMEMultipart, email.MIMEBase
49 import email.MIMEMultipart, email.MIMEBase
50 import email.Utils, email.Encoders, email.Generator
50 import email.Utils, email.Encoders, email.Generator
51 from mercurial import cmdutil, commands, hg, mail, patch, util, discovery
51 from mercurial import cmdutil, commands, hg, mail, patch, util, discovery
52 from mercurial import scmutil
52 from mercurial import scmutil
53 from mercurial.i18n import _
53 from mercurial.i18n import _
54 from mercurial.node import bin
54 from mercurial.node import bin
55
55
# command table populated by the @command decorator below
cmdtable = {}
command = cmdutil.command(cmdtable)
58
58
def prompt(ui, prompt, default=None, rest=':'):
    """Ask the user a question via ui.prompt.

    Any (truthy) default is shown inline as ' [default]' before the
    trailing punctuation and is also passed through as the answer used
    when the user just presses enter.
    """
    text = prompt
    if default:
        text = '%s [%s]' % (text, default)
    return ui.prompt(text + rest, default)
63
63
def introwanted(opts, number):
    '''is an introductory message apparently wanted?'''
    # multi-patch series always get an intro; otherwise only if the user
    # asked for one (--intro) or supplied a series description (--desc)
    wanted = number > 1
    if not wanted:
        wanted = opts.get('intro') or opts.get('desc')
    return wanted
67
67
def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
              patchname=None):
    """Build one email message for a single patch.

    patchlines is the output of 'hg export' split into lines; idx/total
    position the patch within the series and numbered selects the
    '[PATCH M of N]' subject form.  Returns (msg, subj, ds) where msg is
    the email.Message, subj the subject line and ds the diffstat text.
    """

    desc = []
    node = None
    body = ''

    # split the export output: '#' header lines (harvesting the node id),
    # then description lines until the first diff line
    for line in patchlines:
        if line.startswith('#'):
            if line.startswith('# Node ID'):
                node = line.split()[-1]
            continue
        if line.startswith('diff -r') or line.startswith('diff --git'):
            break
        desc.append(line)

    # without a node id we cannot name the patch unless a name was given
    if not patchname and not node:
        raise ValueError

    if opts.get('attach') and not opts.get('body'):
        # attachment-only mode: the body carries just the description
        body = ('\n'.join(desc[1:]).strip() or
                'Patch subject is complete summary.')
        body += '\n\n\n'

    if opts.get('plain'):
        # --plain: strip the hg patch header and following blank lines
        while patchlines and patchlines[0].startswith('# '):
            patchlines.pop(0)
        if patchlines:
            patchlines.pop(0)
        while patchlines and not patchlines[0].strip():
            patchlines.pop(0)

    ds = patch.diffstat(patchlines, git=opts.get('git'))
    if opts.get('diffstat'):
        body += ds + '\n\n'

    addattachment = opts.get('attach') or opts.get('inline')
    if not addattachment or opts.get('body'):
        body += '\n'.join(patchlines)

    if addattachment:
        msg = email.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
                               opts.get('test'))
        binnode = bin(node)
        # if node is mq patch, it will have the patch file's name as a tag
        if not patchname:
            patchtags = [t for t in repo.nodetags(binnode)
                         if t.endswith('.patch') or t.endswith('.diff')]
            if patchtags:
                patchname = patchtags[0]
            elif total > 1:
                patchname = cmdutil.makefilename(repo, '%b-%n.patch',
                                                 binnode, seqno=idx,
                                                 total=total)
            else:
                patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
        disposition = 'inline'
        if opts.get('attach'):
            disposition = 'attachment'
        p['Content-Disposition'] = disposition + '; filename=' + patchname
        msg.attach(p)
    else:
        msg = mail.mimetextpatch(body, display=opts.get('test'))

    # optional extra flags inserted in the subject prefix
    flag = ' '.join(opts.get('flag'))
    if flag:
        flag = ' ' + flag

    subj = desc[0].strip().rstrip('. ')
    if not numbered:
        subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
    else:
        # zero-pad the index to the width of the series size
        tlen = len(str(total))
        subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    msg['X-Mercurial-Node'] = node
    return msg, subj, ds
148
148
# options shared by 'hg email' and other patchbomb-style commands
emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
    ('t', 'to', [], _('email addresses of recipients'))]
167
167
168 @command('email',
168 @command('email',
169 [('g', 'git', None, _('use git extended diff format')),
169 [('g', 'git', None, _('use git extended diff format')),
170 ('', 'plain', None, _('omit hg patch header')),
170 ('', 'plain', None, _('omit hg patch header')),
171 ('o', 'outgoing', None,
171 ('o', 'outgoing', None,
172 _('send changes not found in the target repository')),
172 _('send changes not found in the target repository')),
173 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
173 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
174 ('', 'bundlename', 'bundle',
174 ('', 'bundlename', 'bundle',
175 _('name of the bundle attachment file'), _('NAME')),
175 _('name of the bundle attachment file'), _('NAME')),
176 ('r', 'rev', [], _('a revision to send'), _('REV')),
176 ('r', 'rev', [], _('a revision to send'), _('REV')),
177 ('', 'force', None, _('run even when remote repository is unrelated '
177 ('', 'force', None, _('run even when remote repository is unrelated '
178 '(with -b/--bundle)')),
178 '(with -b/--bundle)')),
179 ('', 'base', [], _('a base changeset to specify instead of a destination '
179 ('', 'base', [], _('a base changeset to specify instead of a destination '
180 '(with -b/--bundle)'), _('REV')),
180 '(with -b/--bundle)'), _('REV')),
181 ('', 'intro', None, _('send an introduction email for a single patch')),
181 ('', 'intro', None, _('send an introduction email for a single patch')),
182 ] + emailopts + commands.remoteopts,
182 ] + emailopts + commands.remoteopts,
183 _('hg email [OPTION]... [DEST]...'))
183 _('hg email [OPTION]... [DEST]...'))
184 def patchbomb(ui, repo, *revs, **opts):
184 def patchbomb(ui, repo, *revs, **opts):
185 '''send changesets by email
185 '''send changesets by email
186
186
187 By default, diffs are sent in the format generated by
187 By default, diffs are sent in the format generated by
188 :hg:`export`, one per message. The series starts with a "[PATCH 0
188 :hg:`export`, one per message. The series starts with a "[PATCH 0
189 of N]" introduction, which describes the series as a whole.
189 of N]" introduction, which describes the series as a whole.
190
190
191 Each patch email has a Subject line of "[PATCH M of N] ...", using
191 Each patch email has a Subject line of "[PATCH M of N] ...", using
192 the first line of the changeset description as the subject text.
192 the first line of the changeset description as the subject text.
193 The message contains two or three parts. First, the changeset
193 The message contains two or three parts. First, the changeset
194 description.
194 description.
195
195
196 With the -d/--diffstat option, if the diffstat program is
196 With the -d/--diffstat option, if the diffstat program is
197 installed, the result of running diffstat on the patch is inserted.
197 installed, the result of running diffstat on the patch is inserted.
198
198
199 Finally, the patch itself, as generated by :hg:`export`.
199 Finally, the patch itself, as generated by :hg:`export`.
200
200
201 With the -d/--diffstat or -c/--confirm options, you will be presented
201 With the -d/--diffstat or -c/--confirm options, you will be presented
202 with a final summary of all messages and asked for confirmation before
202 with a final summary of all messages and asked for confirmation before
203 the messages are sent.
203 the messages are sent.
204
204
205 By default the patch is included as text in the email body for
205 By default the patch is included as text in the email body for
206 easy reviewing. Using the -a/--attach option will instead create
206 easy reviewing. Using the -a/--attach option will instead create
207 an attachment for the patch. With -i/--inline an inline attachment
207 an attachment for the patch. With -i/--inline an inline attachment
208 will be created. You can include a patch both as text in the email
208 will be created. You can include a patch both as text in the email
209 body and as a regular or an inline attachment by combining the
209 body and as a regular or an inline attachment by combining the
210 -a/--attach or -i/--inline with the --body option.
210 -a/--attach or -i/--inline with the --body option.
211
211
212 With -o/--outgoing, emails will be generated for patches not found
212 With -o/--outgoing, emails will be generated for patches not found
213 in the destination repository (or only those which are ancestors
213 in the destination repository (or only those which are ancestors
214 of the specified revisions if any are provided)
214 of the specified revisions if any are provided)
215
215
216 With -b/--bundle, changesets are selected as for --outgoing, but a
216 With -b/--bundle, changesets are selected as for --outgoing, but a
217 single email containing a binary Mercurial bundle as an attachment
217 single email containing a binary Mercurial bundle as an attachment
218 will be sent.
218 will be sent.
219
219
220 With -m/--mbox, instead of previewing each patchbomb message in a
220 With -m/--mbox, instead of previewing each patchbomb message in a
221 pager or sending the messages directly, it will create a UNIX
221 pager or sending the messages directly, it will create a UNIX
222 mailbox file with the patch emails. This mailbox file can be
222 mailbox file with the patch emails. This mailbox file can be
223 previewed with any mail user agent which supports UNIX mbox
223 previewed with any mail user agent which supports UNIX mbox
224 files.
224 files.
225
225
226 With -n/--test, all steps will run, but mail will not be sent.
226 With -n/--test, all steps will run, but mail will not be sent.
227 You will be prompted for an email recipient address, a subject and
227 You will be prompted for an email recipient address, a subject and
228 an introductory message describing the patches of your patchbomb.
228 an introductory message describing the patches of your patchbomb.
229 Then when all is done, patchbomb messages are displayed. If the
229 Then when all is done, patchbomb messages are displayed. If the
230 PAGER environment variable is set, your pager will be fired up once
230 PAGER environment variable is set, your pager will be fired up once
231 for each patchbomb message, so you can verify everything is alright.
231 for each patchbomb message, so you can verify everything is alright.
232
232
233 In case email sending fails, you will find a backup of your series
233 In case email sending fails, you will find a backup of your series
234 introductory message in ``.hg/last-email.txt``.
234 introductory message in ``.hg/last-email.txt``.
235
235
236 Examples::
236 Examples::
237
237
238 hg email -r 3000 # send patch 3000 only
238 hg email -r 3000 # send patch 3000 only
239 hg email -r 3000 -r 3001 # send patches 3000 and 3001
239 hg email -r 3000 -r 3001 # send patches 3000 and 3001
240 hg email -r 3000:3005 # send patches 3000 through 3005
240 hg email -r 3000:3005 # send patches 3000 through 3005
241 hg email 3000 # send patch 3000 (deprecated)
241 hg email 3000 # send patch 3000 (deprecated)
242
242
243 hg email -o # send all patches not in default
243 hg email -o # send all patches not in default
244 hg email -o DEST # send all patches not in DEST
244 hg email -o DEST # send all patches not in DEST
245 hg email -o -r 3000 # send all ancestors of 3000 not in default
245 hg email -o -r 3000 # send all ancestors of 3000 not in default
246 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
246 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
247
247
248 hg email -b # send bundle of all patches not in default
248 hg email -b # send bundle of all patches not in default
249 hg email -b DEST # send bundle of all patches not in DEST
249 hg email -b DEST # send bundle of all patches not in DEST
250 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
250 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
251 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
251 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
252
252
253 hg email -o -m mbox && # generate an mbox file...
253 hg email -o -m mbox && # generate an mbox file...
254 mutt -R -f mbox # ... and view it with mutt
254 mutt -R -f mbox # ... and view it with mutt
255 hg email -o -m mbox && # generate an mbox file ...
255 hg email -o -m mbox && # generate an mbox file ...
256 formail -s sendmail \\ # ... and use formail to send from the mbox
256 formail -s sendmail \\ # ... and use formail to send from the mbox
257 -bm -t < mbox # ... using sendmail
257 -bm -t < mbox # ... using sendmail
258
258
259 Before using this command, you will need to enable email in your
259 Before using this command, you will need to enable email in your
260 hgrc. See the [email] section in hgrc(5) for details.
260 hgrc. See the [email] section in hgrc(5) for details.
261 '''
261 '''
262
262
263 _charsets = mail._charsets(ui)
263 _charsets = mail._charsets(ui)
264
264
265 bundle = opts.get('bundle')
265 bundle = opts.get('bundle')
266 date = opts.get('date')
266 date = opts.get('date')
267 mbox = opts.get('mbox')
267 mbox = opts.get('mbox')
268 outgoing = opts.get('outgoing')
268 outgoing = opts.get('outgoing')
269 rev = opts.get('rev')
269 rev = opts.get('rev')
270 # internal option used by pbranches
270 # internal option used by pbranches
271 patches = opts.get('patches')
271 patches = opts.get('patches')
272
272
def getoutgoing(dest, revs):
    '''Return the revisions present locally but not in dest'''
    # Resolve the destination alias and any branch specification on it.
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
    remote = hg.peer(repo, opts, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    common, _anyinc, _heads = discovery.findcommonincoming(repo, remote)
    heads = revs and map(repo.lookup, revs) or revs
    missing = repo.changelog.findmissing(common, heads=heads)
    if missing:
        return [str(repo.changelog.rev(node)) for node in missing]
    ui.status(_("no changes found\n"))
    return []
287
287
def getpatches(revs):
    '''Yield each requested revision as a list of patch lines.'''
    for rev in scmutil.revrange(repo, revs):
        buf = cStringIO.StringIO()
        cmdutil.export(repo, [rev], fp=buf,
                       opts=patch.diffopts(ui, opts))
        yield buf.getvalue().split('\n')
294
294
def getbundle(dest):
    '''Bundle the changesets outgoing to dest and return the raw bytes.

    The bundle is written to a temporary file which is always removed,
    even when bundling or reading it back fails.
    '''
    tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
    tmpfn = os.path.join(tmpdir, 'bundle')
    try:
        commands.bundle(ui, repo, tmpfn, dest, **opts)
        fp = open(tmpfn, 'rb')
        try:
            return fp.read()
        finally:
            # close the handle even if read() raises (the original
            # leaked it on a failed read)
            fp.close()
    finally:
        try:
            os.unlink(tmpfn)
        except OSError:
            # the bundle file may never have been created
            pass
        os.rmdir(tmpdir)
310
310
311 if not (opts.get('test') or mbox):
311 if not (opts.get('test') or mbox):
312 # really sending
312 # really sending
313 mail.validateconfig(ui)
313 mail.validateconfig(ui)
314
314
315 if not (revs or rev or outgoing or bundle or patches):
315 if not (revs or rev or outgoing or bundle or patches):
316 raise util.Abort(_('specify at least one changeset with -r or -o'))
316 raise util.Abort(_('specify at least one changeset with -r or -o'))
317
317
318 if outgoing and bundle:
318 if outgoing and bundle:
319 raise util.Abort(_("--outgoing mode always on with --bundle;"
319 raise util.Abort(_("--outgoing mode always on with --bundle;"
320 " do not re-specify --outgoing"))
320 " do not re-specify --outgoing"))
321
321
322 if outgoing or bundle:
322 if outgoing or bundle:
323 if len(revs) > 1:
323 if len(revs) > 1:
324 raise util.Abort(_("too many destinations"))
324 raise util.Abort(_("too many destinations"))
325 dest = revs and revs[0] or None
325 dest = revs and revs[0] or None
326 revs = []
326 revs = []
327
327
328 if rev:
328 if rev:
329 if revs:
329 if revs:
330 raise util.Abort(_('use only one form to specify the revision'))
330 raise util.Abort(_('use only one form to specify the revision'))
331 revs = rev
331 revs = rev
332
332
333 if outgoing:
333 if outgoing:
334 revs = getoutgoing(dest, rev)
334 revs = getoutgoing(dest, rev)
335 if bundle:
335 if bundle:
336 opts['revs'] = revs
336 opts['revs'] = revs
337
337
338 # start
338 # start
339 if date:
339 if date:
340 start_time = util.parsedate(date)
340 start_time = util.parsedate(date)
341 else:
341 else:
342 start_time = util.makedate()
342 start_time = util.makedate()
343
343
344 def genmsgid(id):
344 def genmsgid(id):
345 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
345 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
346
346
def getdescription(body, sender):
    '''Return the introductory message for the patch series.

    If --desc was given, read it from that file; otherwise open an
    editor for the user and save a backup copy under .hg so the text
    can be recovered if sending fails.
    '''
    descpath = opts.get('desc')
    if descpath:
        fp = open(descpath)
        try:
            body = fp.read()
        finally:
            # do not leak the description file handle (the original
            # never closed it)
            fp.close()
    else:
        ui.write(_('\nWrite the introductory message for the '
                   'patch series.\n\n'))
        body = ui.edit(body, sender)
        # Save series description in case sendmail fails
        msgfile = repo.opener('last-email.txt', 'wb')
        msgfile.write(body)
        msgfile.close()
    return body
359
359
def getpatchmsgs(patches, patchnames=None):
    '''Build the message list for a series: optional intro + one per patch.'''
    total = len(patches)
    ui.write(_('This patch series consists of %d patches.\n\n') % total)

    msgs = []
    # The introduction is optional; the user may decline it.
    if introwanted(opts, total):
        intro = makeintro(patches)
        if intro:
            msgs.append(intro)

    # Number the subjects only when more than one message will go out.
    numbered = len(msgs) + total > 1

    for idx, p in enumerate(patches):
        name = None
        if patchnames:
            name = patchnames[idx]
        msgs.append(makepatch(ui, repo, p, opts, _charsets, idx + 1,
                              total, numbered, name))
    return msgs
385
385
def makeintro(patches):
    '''Build the introductory message, or return None if the user skips it.'''
    total = len(patches)
    width = len(str(total))

    flags = opts.get('flag') or ''
    if flags:
        flags = ' ' + ' '.join(flags)
    prefix = '[PATCH %0*d of %d%s]' % (width, 0, total, flags)

    subj = opts.get('subject')
    if not subj:
        subj = prompt(ui, '(optional) Subject: ', rest=prefix, default='')
    if not subj:
        # no subject given: skip the intro entirely
        return None
    subj = '%s %s' % (prefix, subj)

    diffstat = None
    body = ''
    if opts.get('diffstat'):
        # generate a cumulative diffstat of the whole patch series
        diffstat = patch.diffstat(sum(patches, []))
        body = '\n' + diffstat

    body = getdescription(body, sender)
    msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    return (msg, subj, diffstat)
414
414
def getbundlemsgs(bundle):
    '''Wrap raw bundle bytes in a single multipart mail message.'''
    subj = opts.get('subject')
    if not subj:
        subj = prompt(ui, 'Subject:', 'A bundle for your repository')

    body = getdescription('', sender)
    msg = email.MIMEMultipart.MIMEMultipart()
    if body:
        msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))

    # attach the bundle itself, base64-encoded
    attachment = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
    attachment.set_payload(bundle)
    attachment.add_header('Content-Disposition', 'attachment',
                          filename='%s.hg' % opts.get('bundlename', 'bundle'))
    email.Encoders.encode_base64(attachment)
    msg.attach(attachment)

    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    return [(msg, subj, None)]
432
432
433 sender = (opts.get('from') or ui.config('email', 'from') or
433 sender = (opts.get('from') or ui.config('email', 'from') or
434 ui.config('patchbomb', 'from') or
434 ui.config('patchbomb', 'from') or
435 prompt(ui, 'From', ui.username()))
435 prompt(ui, 'From', ui.username()))
436
436
437 if patches:
437 if patches:
438 msgs = getpatchmsgs(patches, opts.get('patchnames'))
438 msgs = getpatchmsgs(patches, opts.get('patchnames'))
439 elif bundle:
439 elif bundle:
440 msgs = getbundlemsgs(getbundle(dest))
440 msgs = getbundlemsgs(getbundle(dest))
441 else:
441 else:
442 msgs = getpatchmsgs(list(getpatches(revs)))
442 msgs = getpatchmsgs(list(getpatches(revs)))
443
443
444 showaddrs = []
444 showaddrs = []
445
445
def getaddrs(header, ask=False, default=None):
    '''Resolve addresses for a mail header.

    Precedence: command-line option, then [email]/[patchbomb] config,
    then (when ask is set) an interactive prompt; otherwise default.
    '''
    configkey = header.lower()
    optname = header.replace('-', '_').lower()

    cmdline = opts.get(optname)
    if cmdline:
        showaddrs.append('%s: %s' % (header, ', '.join(cmdline)))
        return mail.addrlistencode(ui, cmdline, _charsets, opts.get('test'))

    # not on the command line: fallback to config and then maybe ask
    addr = (ui.config('email', configkey) or
            ui.config('patchbomb', configkey) or '')
    if not addr and ask:
        addr = prompt(ui, header, default=default)
    if not addr:
        return default
    showaddrs.append('%s: %s' % (header, addr))
    return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
465
465
466 to = getaddrs('To', ask=True)
466 to = getaddrs('To', ask=True)
467 if not to:
467 if not to:
468 # we can get here in non-interactive mode
468 # we can get here in non-interactive mode
469 raise util.Abort(_('no recipient addresses provided'))
469 raise util.Abort(_('no recipient addresses provided'))
470 cc = getaddrs('Cc', ask=True, default='') or []
470 cc = getaddrs('Cc', ask=True, default='') or []
471 bcc = getaddrs('Bcc') or []
471 bcc = getaddrs('Bcc') or []
472 replyto = getaddrs('Reply-To')
472 replyto = getaddrs('Reply-To')
473
473
474 if opts.get('diffstat') or opts.get('confirm'):
474 if opts.get('diffstat') or opts.get('confirm'):
475 ui.write(_('\nFinal summary:\n\n'))
475 ui.write(_('\nFinal summary:\n\n'))
476 ui.write('From: %s\n' % sender)
476 ui.write('From: %s\n' % sender)
477 for addr in showaddrs:
477 for addr in showaddrs:
478 ui.write('%s\n' % addr)
478 ui.write('%s\n' % addr)
479 for m, subj, ds in msgs:
479 for m, subj, ds in msgs:
480 ui.write('Subject: %s\n' % subj)
480 ui.write('Subject: %s\n' % subj)
481 if ds:
481 if ds:
482 ui.write(ds)
482 ui.write(ds)
483 ui.write('\n')
483 ui.write('\n')
484 if ui.promptchoice(_('are you sure you want to send (yn)?'),
484 if ui.promptchoice(_('are you sure you want to send (yn)?'),
485 (_('&Yes'), _('&No'))):
485 (_('&Yes'), _('&No'))):
486 raise util.Abort(_('patchbomb canceled'))
486 raise util.Abort(_('patchbomb canceled'))
487
487
488 ui.write('\n')
488 ui.write('\n')
489
489
490 parent = opts.get('in_reply_to') or None
490 parent = opts.get('in_reply_to') or None
491 # angle brackets may be omitted, they're not semantically part of the msg-id
491 # angle brackets may be omitted, they're not semantically part of the msg-id
492 if parent is not None:
492 if parent is not None:
493 if not parent.startswith('<'):
493 if not parent.startswith('<'):
494 parent = '<' + parent
494 parent = '<' + parent
495 if not parent.endswith('>'):
495 if not parent.endswith('>'):
496 parent += '>'
496 parent += '>'
497
497
498 first = True
498 first = True
499
499
500 sender_addr = email.Utils.parseaddr(sender)[1]
500 sender_addr = email.Utils.parseaddr(sender)[1]
501 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
501 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
502 sendmail = None
502 sendmail = None
503 for i, (m, subj, ds) in enumerate(msgs):
503 for i, (m, subj, ds) in enumerate(msgs):
504 try:
504 try:
505 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
505 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
506 except TypeError:
506 except TypeError:
507 m['Message-Id'] = genmsgid('patchbomb')
507 m['Message-Id'] = genmsgid('patchbomb')
508 if parent:
508 if parent:
509 m['In-Reply-To'] = parent
509 m['In-Reply-To'] = parent
510 m['References'] = parent
510 m['References'] = parent
511 if first:
511 if first:
512 parent = m['Message-Id']
512 parent = m['Message-Id']
513 first = False
513 first = False
514
514
515 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
515 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
516 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
516 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
517
517
518 start_time = (start_time[0] + 1, start_time[1])
518 start_time = (start_time[0] + 1, start_time[1])
519 m['From'] = sender
519 m['From'] = sender
520 m['To'] = ', '.join(to)
520 m['To'] = ', '.join(to)
521 if cc:
521 if cc:
522 m['Cc'] = ', '.join(cc)
522 m['Cc'] = ', '.join(cc)
523 if bcc:
523 if bcc:
524 m['Bcc'] = ', '.join(bcc)
524 m['Bcc'] = ', '.join(bcc)
525 if replyto:
525 if replyto:
526 m['Reply-To'] = ', '.join(replyto)
526 m['Reply-To'] = ', '.join(replyto)
527 if opts.get('test'):
527 if opts.get('test'):
528 ui.status(_('Displaying '), subj, ' ...\n')
528 ui.status(_('Displaying '), subj, ' ...\n')
529 ui.flush()
529 ui.flush()
530 if 'PAGER' in os.environ and not ui.plain():
530 if 'PAGER' in os.environ and not ui.plain():
531 fp = util.popen(os.environ['PAGER'], 'w')
531 fp = util.popen(os.environ['PAGER'], 'w')
532 else:
532 else:
533 fp = ui
533 fp = ui
534 generator = email.Generator.Generator(fp, mangle_from_=False)
534 generator = email.Generator.Generator(fp, mangle_from_=False)
535 try:
535 try:
536 generator.flatten(m, 0)
536 generator.flatten(m, 0)
537 fp.write('\n')
537 fp.write('\n')
538 except IOError, inst:
538 except IOError, inst:
539 if inst.errno != errno.EPIPE:
539 if inst.errno != errno.EPIPE:
540 raise
540 raise
541 if fp is not ui:
541 if fp is not ui:
542 fp.close()
542 fp.close()
543 else:
543 else:
544 if not sendmail:
544 if not sendmail:
545 sendmail = mail.connect(ui, mbox=mbox)
545 sendmail = mail.connect(ui, mbox=mbox)
546 ui.status(_('Sending '), subj, ' ...\n')
546 ui.status(_('Sending '), subj, ' ...\n')
547 ui.progress(_('sending'), i, item=subj, total=len(msgs))
547 ui.progress(_('sending'), i, item=subj, total=len(msgs))
548 if not mbox:
548 if not mbox:
549 # Exim does not remove the Bcc field
549 # Exim does not remove the Bcc field
550 del m['Bcc']
550 del m['Bcc']
551 fp = cStringIO.StringIO()
551 fp = cStringIO.StringIO()
552 generator = email.Generator.Generator(fp, mangle_from_=False)
552 generator = email.Generator.Generator(fp, mangle_from_=False)
553 generator.flatten(m, 0)
553 generator.flatten(m, 0)
554 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
554 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
555
555
556 ui.progress(_('writing'), None)
556 ui.progress(_('writing'), None)
557 ui.progress(_('sending'), None)
557 ui.progress(_('sending'), None)
@@ -1,186 +1,186 b''
1 # zeroconf.py - zeroconf support for Mercurial
1 # zeroconf.py - zeroconf support for Mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''discover and advertise repositories on the local network
8 '''discover and advertise repositories on the local network
9
9
10 Zeroconf-enabled repositories will be announced in a network without
10 Zeroconf-enabled repositories will be announced in a network without
11 the need to configure a server or a service. They can be discovered
11 the need to configure a server or a service. They can be discovered
12 without knowing their actual IP address.
12 without knowing their actual IP address.
13
13
14 To allow other people to discover your repository using run
14 To allow other people to discover your repository using run
15 :hg:`serve` in your repository::
15 :hg:`serve` in your repository::
16
16
17 $ cd test
17 $ cd test
18 $ hg serve
18 $ hg serve
19
19
20 You can discover Zeroconf-enabled repositories by running
20 You can discover Zeroconf-enabled repositories by running
21 :hg:`paths`::
21 :hg:`paths`::
22
22
23 $ hg paths
23 $ hg paths
24 zc-test = http://example.com:8000/test
24 zc-test = http://example.com:8000/test
25 '''
25 '''
26
26
27 import socket, time, os
27 import socket, time, os
28
28
29 import Zeroconf
29 import Zeroconf
30 from mercurial import ui, hg, encoding, util, dispatch
30 from mercurial import ui, hg, encoding, util, dispatch
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial.hgweb import hgweb_mod
32 from mercurial.hgweb import hgweb_mod
33 from mercurial.hgweb import hgwebdir_mod
33 from mercurial.hgweb import hgwebdir_mod
34
34
35 # publish
35 # publish
36
36
37 server = None
37 server = None
38 localip = None
38 localip = None
39
39
def getip():
    '''Make a best-effort guess at this host's externally visible IP.'''
    # Ask the kernel which interface routes to an external address; on
    # Linux a UDP connect to port 0 does this without sending packets.
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        probe.connect(('1.0.0.1', 0))
        return probe.getsockname()[0]
    except socket.error:
        pass

    # Generic method, sometimes gives useless (loopback/IPv6) results.
    try:
        dumbip = socket.gethostbyaddr(socket.gethostname())[2][0]
        if not dumbip.startswith('127.') and ':' not in dumbip:
            return dumbip
    except (socket.gaierror, socket.herror):
        dumbip = '127.0.0.1'

    # Works elsewhere, but actually sends a packet.
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        probe.connect(('1.0.0.1', 1))
        return probe.getsockname()[0]
    except socket.error:
        pass

    return dumbip
68
68
def publish(name, desc, path, port):
    '''Advertise one repository over both the _http and _hg mDNS services.'''
    global server, localip
    if not server:
        ip = getip()
        if ip.startswith('127.'):
            # if we have no internet connection, this can happen.
            return
        localip = socket.inet_aton(ip)
        server = Zeroconf.Zeroconf(ip)

    hostname = socket.gethostname().split('.')[0]
    host = hostname + ".local"
    name = "%s-%s" % (hostname, name)

    # advertise to browsers (_http) and to Mercurial clients (_hg)
    for stype in ('_http._tcp.local.', '_hg._tcp.local.'):
        svc = Zeroconf.ServiceInfo(stype,
                                   name + '.' + stype,
                                   server=host,
                                   port=port,
                                   properties={'description': desc,
                                               'path': "/" + path},
                                   address=localip, weight=0, priority=0)
        server.registerService(svc)
102
102
class hgwebzc(hgweb_mod.hgweb):
    '''hgweb subclass that announces the served repository via Zeroconf.'''
    def __init__(self, repo, name=None, baseui=None):
        super(hgwebzc, self).__init__(repo, name=name, baseui=baseui)
        reponame = self.reponame or os.path.basename(self.repo.root)
        prefix = self.repo.ui.config("web", "prefix", "").strip('/')
        desc = self.repo.ui.config("web", "description", reponame)
        port = util.getport(self.repo.ui.config("web", "port", 8000))
        publish(reponame, desc, prefix, port)
111
111
class hgwebdirzc(hgwebdir_mod.hgwebdir):
    '''hgwebdir subclass that announces every served repository.'''
    def __init__(self, conf, baseui=None):
        super(hgwebdirzc, self).__init__(conf, baseui=baseui)
        prefix = self.ui.config("web", "prefix", "").strip('/') + '/'
        for repo, repopath in self.repos:
            # each repository may override web settings in its own hgrc
            repoui = self.ui.copy()
            repoui.readconfig(os.path.join(repopath, '.hg', 'hgrc'))
            name = os.path.basename(repo)
            webpath = (prefix + repo).strip('/')
            desc = repoui.config('web', 'description', name)
            port = util.getport(repoui.config("web", "port", 8000))
            publish(name, desc, webpath, port)
124
124
125 # listen
125 # listen
126
126
class listener(object):
    '''Zeroconf browse listener that records discovered services.'''
    def __init__(self):
        # maps repr(service name) -> service info object
        self.found = {}

    def removeService(self, server, type, name):
        # forget the service if we ever recorded it; ignore otherwise
        self.found.pop(repr(name), None)

    def addService(self, server, type, name):
        self.found[repr(name)] = server.getServiceInfo(type, name)
135
135
def getzcpaths():
    '''Yield (name, url) pairs for repositories advertised on the LAN.'''
    ip = getip()
    if ip.startswith('127.'):
        # loopback only: nothing to browse
        return
    server = Zeroconf.Zeroconf(ip)
    watcher = listener()
    Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", watcher)
    # give responders a moment to answer before tearing down
    time.sleep(1)
    server.close()
    for info in watcher.found.values():
        shortname = info.name[:info.name.index('.')]
        url = "http://%s:%s%s" % (socket.inet_ntoa(info.address), info.port,
                                  info.properties.get("path", "/"))
        yield "zc-" + shortname, url
150
150
def config(orig, self, section, key, default=None, untrusted=False):
    '''Wrapped ui.config that resolves zc-* path aliases via Zeroconf.'''
    if section == "paths" and key.startswith("zc-"):
        for zcname, zcpath in getzcpaths():
            if zcname == key:
                return zcpath
    return orig(self, section, key, default, untrusted)
157
157
def configitems(orig, self, section, untrusted=False):
    '''Wrapped ui.configitems that appends Zeroconf-discovered paths.'''
    items = orig(self, section, untrusted)
    if section == "paths":
        # extend in place with the (name, url) pairs found on the LAN
        items += getzcpaths()
    return items
163
163
def defaultdest(orig, source):
    '''Wrapped hg.defaultdest mapping a Zeroconf URL back to its zc- name.'''
    for zcname, zcpath in getzcpaths():
        if zcpath == source:
            return zcname.encode(encoding.encoding)
    return orig(source)
169
169
def cleanupafterdispatch(orig, ui, options, cmd, cmdfunc):
    '''Run the command, then shut the Zeroconf server down.

    close() must be called on the server to notify() the various
    threading Conditions so the background threads can exit.
    '''
    global server
    try:
        return orig(ui, options, cmd, cmdfunc)
    finally:
        if server:
            server.close()
179
179
# Hook dispatch so the Zeroconf server is torn down after each command,
# and ui/hg so zc-* paths are visible to path lookup and cloning.
extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch)

extensions.wrapfunction(ui.ui, 'config', config)
extensions.wrapfunction(ui.ui, 'configitems', configitems)
extensions.wrapfunction(hg, 'defaultdest', defaultdest)
hgweb_mod.hgweb = hgwebzc
hgwebdir_mod.hgwebdir = hgwebdirzc
@@ -1,764 +1,764 b''
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, see
12 # License along with this library; if not, see
13 # <http://www.gnu.org/licenses/>.
13 # <http://www.gnu.org/licenses/>.
14
14
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
17
17
18 # Modified by Benoit Boissinot:
18 # Modified by Benoit Boissinot:
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
20 # Modified by Dirkjan Ochtman:
20 # Modified by Dirkjan Ochtman:
21 # - import md5 function from a local util module
21 # - import md5 function from a local util module
22 # Modified by Martin Geisler:
22 # Modified by Martin Geisler:
23 # - moved md5 function from local util module to this module
23 # - moved md5 function from local util module to this module
24 # Modified by Augie Fackler:
24 # Modified by Augie Fackler:
25 # - add safesend method and use it to prevent broken pipe errors
25 # - add safesend method and use it to prevent broken pipe errors
26 # on large POST requests
26 # on large POST requests
27
27
28 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
28 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
29
29
30 >>> import urllib2
30 >>> import urllib2
31 >>> from keepalive import HTTPHandler
31 >>> from keepalive import HTTPHandler
32 >>> keepalive_handler = HTTPHandler()
32 >>> keepalive_handler = HTTPHandler()
33 >>> opener = urllib2.build_opener(keepalive_handler)
33 >>> opener = urllib2.build_opener(keepalive_handler)
34 >>> urllib2.install_opener(opener)
34 >>> urllib2.install_opener(opener)
35 >>>
35 >>>
36 >>> fo = urllib2.urlopen('http://www.python.org')
36 >>> fo = urllib2.urlopen('http://www.python.org')
37
37
38 If a connection to a given host is requested, and all of the existing
38 If a connection to a given host is requested, and all of the existing
39 connections are still in use, another connection will be opened. If
39 connections are still in use, another connection will be opened. If
40 the handler tries to use an existing connection but it fails in some
40 the handler tries to use an existing connection but it fails in some
41 way, it will be closed and removed from the pool.
41 way, it will be closed and removed from the pool.
42
42
43 To remove the handler, simply re-run build_opener with no arguments, and
43 To remove the handler, simply re-run build_opener with no arguments, and
44 install that opener.
44 install that opener.
45
45
46 You can explicitly close connections by using the close_connection()
46 You can explicitly close connections by using the close_connection()
47 method of the returned file-like object (described below) or you can
47 method of the returned file-like object (described below) or you can
48 use the handler methods:
48 use the handler methods:
49
49
50 close_connection(host)
50 close_connection(host)
51 close_all()
51 close_all()
52 open_connections()
52 open_connections()
53
53
54 NOTE: using the close_connection and close_all methods of the handler
54 NOTE: using the close_connection and close_all methods of the handler
55 should be done with care when using multiple threads.
55 should be done with care when using multiple threads.
56 * there is nothing that prevents another thread from creating new
56 * there is nothing that prevents another thread from creating new
57 connections immediately after connections are closed
57 connections immediately after connections are closed
58 * no checks are done to prevent in-use connections from being closed
58 * no checks are done to prevent in-use connections from being closed
59
59
60 >>> keepalive_handler.close_all()
60 >>> keepalive_handler.close_all()
61
61
62 EXTRA ATTRIBUTES AND METHODS
62 EXTRA ATTRIBUTES AND METHODS
63
63
64 Upon a status of 200, the object returned has a few additional
64 Upon a status of 200, the object returned has a few additional
65 attributes and methods, which should not be used if you want to
65 attributes and methods, which should not be used if you want to
66 remain consistent with the normal urllib2-returned objects:
66 remain consistent with the normal urllib2-returned objects:
67
67
68 close_connection() - close the connection to the host
68 close_connection() - close the connection to the host
69 readlines() - you know, readlines()
69 readlines() - you know, readlines()
70 status - the return status (ie 404)
70 status - the return status (ie 404)
71 reason - english translation of status (ie 'File not found')
71 reason - english translation of status (ie 'File not found')
72
72
73 If you want the best of both worlds, use this inside an
73 If you want the best of both worlds, use this inside an
74 AttributeError-catching try:
74 AttributeError-catching try:
75
75
76 >>> try: status = fo.status
76 >>> try: status = fo.status
77 >>> except AttributeError: status = None
77 >>> except AttributeError: status = None
78
78
79 Unfortunately, these are ONLY there if status == 200, so it's not
79 Unfortunately, these are ONLY there if status == 200, so it's not
80 easy to distinguish between non-200 responses. The reason is that
80 easy to distinguish between non-200 responses. The reason is that
81 urllib2 tries to do clever things with error codes 301, 302, 401,
81 urllib2 tries to do clever things with error codes 301, 302, 401,
82 and 407, and it wraps the object upon return.
82 and 407, and it wraps the object upon return.
83
83
84 For python versions earlier than 2.4, you can avoid this fancy error
84 For python versions earlier than 2.4, you can avoid this fancy error
85 handling by setting the module-level global HANDLE_ERRORS to zero.
85 handling by setting the module-level global HANDLE_ERRORS to zero.
86 You see, prior to 2.4, it's the HTTP Handler's job to determine what
86 You see, prior to 2.4, it's the HTTP Handler's job to determine what
87 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
87 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
88 means "pass everything up". In python 2.4, however, this job no
88 means "pass everything up". In python 2.4, however, this job no
89 longer belongs to the HTTP Handler and is now done by a NEW handler,
89 longer belongs to the HTTP Handler and is now done by a NEW handler,
90 HTTPErrorProcessor. Here's the bottom line:
90 HTTPErrorProcessor. Here's the bottom line:
91
91
92 python version < 2.4
92 python version < 2.4
93 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
93 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
94 errors
94 errors
95 HANDLE_ERRORS == 0 pass everything up, error processing is
95 HANDLE_ERRORS == 0 pass everything up, error processing is
96 left to the calling code
96 left to the calling code
97 python version >= 2.4
97 python version >= 2.4
98 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
98 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
99 HANDLE_ERRORS == 0 (default) pass everything up, let the
99 HANDLE_ERRORS == 0 (default) pass everything up, let the
100 other handlers (specifically,
100 other handlers (specifically,
101 HTTPErrorProcessor) decide what to do
101 HTTPErrorProcessor) decide what to do
102
102
103 In practice, setting the variable either way makes little difference
103 In practice, setting the variable either way makes little difference
104 in python 2.4, so for the most consistent behavior across versions,
104 in python 2.4, so for the most consistent behavior across versions,
105 you probably just want to use the defaults, which will give you
105 you probably just want to use the defaults, which will give you
106 exceptions on errors.
106 exceptions on errors.
107
107
108 """
108 """
109
109
110 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
110 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
111
111
112 import errno
112 import errno
113 import httplib
113 import httplib
114 import socket
114 import socket
115 import thread
115 import thread
116 import urllib2
116 import urllib2
117
117
118 DEBUG = None
118 DEBUG = None
119
119
120 import sys
120 import sys
# See the module docstring: before Python 2.4 the HTTP handler itself
# must turn non-200 responses into errors (HANDLE_ERRORS = 1); from 2.4
# on, HTTPErrorProcessor does that job, so pass everything up by default.
if sys.version_info < (2, 4):
    HANDLE_ERRORS = 1
else: HANDLE_ERRORS = 0
124
124
class ConnectionManager(object):
    """
    The connection manager must be able to:
      * keep track of all existing connections
      * map each connection to its host and back
      * remember which connections are idle ("ready") vs. in use

    Mutations of the maps are serialized through a single lock so the
    manager can be shared between threads.
    """
    def __init__(self):
        self._lock = thread.allocate_lock()
        self._hostmap = {} # map hosts to a list of connections
        self._connmap = {} # map connections to host
        self._readymap = {} # map connection to ready state

    def add(self, host, connection, ready):
        # Register a new connection for host; 'ready' says whether it is
        # immediately available for reuse.
        self._lock.acquire()
        try:
            if host not in self._hostmap:
                self._hostmap[host] = []
            self._hostmap[host].append(connection)
            self._connmap[connection] = host
            self._readymap[connection] = ready
        finally:
            self._lock.release()

    def remove(self, connection):
        # Forget a connection entirely; a no-op if it is not tracked.
        self._lock.acquire()
        try:
            try:
                host = self._connmap[connection]
            except KeyError:
                pass
            else:
                del self._connmap[connection]
                del self._readymap[connection]
                self._hostmap[host].remove(connection)
                # drop hosts that have no connections left
                if not self._hostmap[host]: del self._hostmap[host]
        finally:
            self._lock.release()

    def set_ready(self, connection, ready):
        # NOTE(review): dict item assignment never raises KeyError, so
        # this except clause is dead code; it also does not prevent
        # re-adding an entry for a connection that was concurrently
        # remove()d -- confirm the intended semantics.
        try:
            self._readymap[connection] = ready
        except KeyError:
            pass

    def get_ready_conn(self, host):
        # Return an idle connection to host (atomically marking it
        # busy), or None if none is available.
        conn = None
        self._lock.acquire()
        try:
            if host in self._hostmap:
                for c in self._hostmap[host]:
                    if self._readymap[c]:
                        self._readymap[c] = 0
                        conn = c
                        break
        finally:
            self._lock.release()
        return conn

    def get_all(self, host=None):
        # With host: a copy of that host's connection list; without: a
        # shallow copy of the whole host -> connections map.  Copies are
        # returned so callers can iterate without holding the lock.
        if host:
            return list(self._hostmap.get(host, []))
        else:
            return dict(self._hostmap)
187
187
class KeepAliveHandler(object):
    """urllib2-style HTTP handler that keeps connections alive.

    Connections are pooled per host in a ConnectionManager and reused
    for subsequent requests instead of being torn down after each one.
    """
    def __init__(self):
        self._cm = ConnectionManager()

    #### Connection Management
    def open_connections(self):
        """return a list of connected hosts and the number of connections
        to each.  [('foo.com:80', 2), ('bar.org', 1)]"""
        return [(host, len(li)) for (host, li) in self._cm.get_all().items()]

    def close_connection(self, host):
        """close connection(s) to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
        no error occurs if there is no connection to that host."""
        for h in self._cm.get_all(host):
            self._cm.remove(h)
            h.close()

    def close_all(self):
        """close all open connections"""
        for host, conns in self._cm.get_all().iteritems():
            for h in conns:
                self._cm.remove(h)
                h.close()

    def _request_closed(self, request, host, connection):
        """tells us that this request is now closed and that the
        connection is ready for another request"""
        self._cm.set_ready(connection, 1)

    def _remove_connection(self, host, connection, close=0):
        # Drop the connection from the pool, optionally closing its
        # socket first.
        if close:
            connection.close()
        self._cm.remove(connection)

    #### Transaction Execution
    def http_open(self, req):
        # Entry point urllib2 calls for http:// requests.
        return self.do_open(HTTPConnection, req)

    def do_open(self, http_class, req):
        """Issue req, preferring an idle pooled connection to its host.

        Dead pooled connections are discarded until one works; if none
        is usable, a new http_class connection is created and pooled.
        Raises urllib2.URLError on socket or HTTP-protocol failures.
        """
        host = req.get_host()
        if not host:
            raise urllib2.URLError('no host given')

        try:
            h = self._cm.get_ready_conn(host)
            while h:
                r = self._reuse_connection(h, req, host)

                # if this response is non-None, then it worked and we're
                # done.  Break out, skipping the else block.
                if r:
                    break

                # connection is bad - possibly closed by server
                # discard it and ask for the next free connection
                h.close()
                self._cm.remove(h)
                h = self._cm.get_ready_conn(host)
            else:
                # no (working) free connections were found.  Create a new one.
                h = http_class(host)
                if DEBUG:
                    DEBUG.info("creating new connection to %s (%d)",
                               host, id(h))
                self._cm.add(host, h, 0)
                self._start_transaction(h, req)
                r = h.getresponse()
        except (socket.error, httplib.HTTPException), err:
            raise urllib2.URLError(err)

        # if not a persistent connection, don't try to reuse it
        if r.will_close:
            self._cm.remove(h)

        if DEBUG:
            DEBUG.info("STATUS: %s, %s", r.status, r.reason)
        # Decorate the raw httplib response so it quacks like a urllib2
        # one and remembers where it came from, letting close() recycle
        # the connection back into the pool.
        r._handler = self
        r._host = host
        r._url = req.get_full_url()
        r._connection = h
        r.code = r.status
        r.headers = r.msg
        r.msg = r.reason

        if r.status == 200 or not HANDLE_ERRORS:
            return r
        else:
            return self.parent.error('http', req, r,
                        r.status, r.msg, r.headers)

    def _reuse_connection(self, h, req, host):
        """start the transaction with a re-used connection
        return a response object (r) upon success or None on failure.
        This DOES not close or remove bad connections in cases where
        it returns.  However, if an unexpected exception occurs, it
        will close and remove the connection before re-raising.
        """
        try:
            self._start_transaction(h, req)
            r = h.getresponse()
            # note: just because we got something back doesn't mean it
            # worked.  We'll check the version below, too.
        except (socket.error, httplib.HTTPException):
            r = None
        except:
            # Deliberately naked: catch everything, clean up, re-raise.
            # adding this block just in case we've missed
            # something we will still raise the exception, but
            # lets try and close the connection and remove it
            # first.  We previously got into a nasty loop
            # where an exception was uncaught, and so the
            # connection stayed open.  On the next try, the
            # same exception was raised, etc.  The tradeoff is
            # that it's now possible this call will raise
            # a DIFFERENT exception
            if DEBUG:
                DEBUG.error("unexpected exception - closing "
                            "connection to %s (%d)", host, id(h))
            self._cm.remove(h)
            h.close()
            raise

        if r is None or r.version == 9:
            # httplib falls back to assuming HTTP 0.9 if it gets a
            # bad header back.  This is most likely to happen if
            # the socket has been closed by the server since we
            # last used the connection.
            if DEBUG:
                DEBUG.info("failed to re-use connection to %s (%d)",
                           host, id(h))
            r = None
        else:
            if DEBUG:
                DEBUG.info("re-using connection to %s (%d)", host, id(h))

        return r

    def _start_transaction(self, h, req):
        # What follows mostly reimplements HTTPConnection.request()
        # except it adds self.parent.addheaders in the mix.
        headers = req.headers.copy()
        if sys.version_info >= (2, 4):
            headers.update(req.unredirected_hdrs)
        headers.update(self.parent.addheaders)
        # header names are case-insensitive; normalize for the
        # dedup/skip logic below
        headers = dict((n.lower(), v) for n, v in headers.items())
        skipheaders = {}
        for n in ('host', 'accept-encoding'):
            if n in headers:
                # tell putrequest() not to emit these itself -- we will
                # send our own values below
                skipheaders['skip_' + n.replace('-', '_')] = 1
        try:
            if req.has_data():
                data = req.get_data()
                h.putrequest('POST', req.get_selector(), **skipheaders)
                if 'content-type' not in headers:
                    h.putheader('Content-type',
                                'application/x-www-form-urlencoded')
                if 'content-length' not in headers:
                    h.putheader('Content-length', '%d' % len(data))
            else:
                h.putrequest('GET', req.get_selector(), **skipheaders)
        except (socket.error), err:
            raise urllib2.URLError(err)
        for k, v in headers.items():
            h.putheader(k, v)
        h.endheaders()
        if req.has_data():
            h.send(data)
355
355
class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
    # Concrete handler: keep-alive behaviour from KeepAliveHandler,
    # urllib2 protocol registration from urllib2.HTTPHandler.
    pass
358
358
359 class HTTPResponse(httplib.HTTPResponse):
359 class HTTPResponse(httplib.HTTPResponse):
360 # we need to subclass HTTPResponse in order to
360 # we need to subclass HTTPResponse in order to
361 # 1) add readline() and readlines() methods
361 # 1) add readline() and readlines() methods
362 # 2) add close_connection() methods
362 # 2) add close_connection() methods
363 # 3) add info() and geturl() methods
363 # 3) add info() and geturl() methods
364
364
365 # in order to add readline(), read must be modified to deal with a
365 # in order to add readline(), read must be modified to deal with a
366 # buffer. example: readline must read a buffer and then spit back
366 # buffer. example: readline must read a buffer and then spit back
367 # one line at a time. The only real alternative is to read one
367 # one line at a time. The only real alternative is to read one
368 # BYTE at a time (ick). Once something has been read, it can't be
368 # BYTE at a time (ick). Once something has been read, it can't be
369 # put back (ok, maybe it can, but that's even uglier than this),
369 # put back (ok, maybe it can, but that's even uglier than this),
370 # so if you THEN do a normal read, you must first take stuff from
370 # so if you THEN do a normal read, you must first take stuff from
371 # the buffer.
371 # the buffer.
372
372
    # the read method wraps the original to accommodate buffering,
    # although read() never adds to the buffer.
375 # Both readline and readlines have been stolen with almost no
375 # Both readline and readlines have been stolen with almost no
376 # modification from socket.py
376 # modification from socket.py
377
377
378
378
    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        # 'strict' is accepted only for signature compatibility with
        # newer httplib; it is not forwarded to the base class.
        if method: # the httplib in python 2.3 uses the method arg
            httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
        else: # 2.2 doesn't
            httplib.HTTPResponse.__init__(self, sock, debuglevel)
        self.fileno = sock.fileno
        self.code = None
        self._rbuf = '' # read-ahead buffer shared by read()/readline()
        self._rbufsize = 8096 # how much to pull from the socket at once
        self._handler = None # inserted by the handler later
        self._host = None # (same)
        self._url = None # (same)
        self._connection = None # (same)
392
392
    # keep a reference to the unbuffered base-class read so read() and
    # readline() below can layer _rbuf handling on top of it
    _raw_read = httplib.HTTPResponse.read
394
394
    def close(self):
        """Close the response and notify the owning handler that the
        connection is free for the next request (unlike httplib's
        close(), which would discard it)."""
        if self.fp:
            self.fp.close()
            self.fp = None
        if self._handler:
            self._handler._request_closed(self, self._host,
                                          self._connection)
402
402
403 def close_connection(self):
403 def close_connection(self):
404 self._handler._remove_connection(self._host, self._connection, close=1)
404 self._handler._remove_connection(self._host, self._connection, close=1)
405 self.close()
405 self.close()
406
406
    def info(self):
        # urllib2 compatibility: expose the response headers.
        return self.headers
409
409
    def geturl(self):
        # urllib2 compatibility: the URL this response was fetched from.
        return self._url
412
412
    def read(self, amt=None):
        """Read up to amt bytes (everything if amt is None), consuming
        any data left over in the readline() buffer first."""
        # the _rbuf test is only in this first if for speed.  It's not
        # logically necessary
        if self._rbuf and not amt is None:
            L = len(self._rbuf)
            if amt > L:
                # request exceeds the buffer: take it all (prepended
                # below) and read only the remainder from the socket
                amt -= L
            else:
                # request satisfied entirely from the buffer
                s = self._rbuf[:amt]
                self._rbuf = self._rbuf[amt:]
                return s

        s = self._rbuf + self._raw_read(amt)
        self._rbuf = ''
        return s
428
428
    # stolen from Python SVN #68532 to fix issue1088
    def _read_chunked(self, amt):
        """Read amt bytes (all remaining data when amt is None) from a
        chunked-transfer-encoded body.

        self.chunk_left carries the number of unread payload bytes in
        the current chunk across calls; None means the next chunk-size
        line still has to be read.  Raises httplib.IncompleteRead on a
        malformed chunk-size line.
        """
        chunk_left = self.chunk_left
        value = ''

        # XXX This accumulates chunks by repeated string concatenation,
        # which is not efficient as the number or size of chunks gets big.
        while True:
            if chunk_left is None:
                # at a chunk boundary: parse the hex size line
                line = self.fp.readline()
                i = line.find(';')
                if i >= 0:
                    line = line[:i] # strip chunk-extensions
                try:
                    chunk_left = int(line, 16)
                except ValueError:
                    # close the connection as protocol synchronisation is
                    # probably lost
                    self.close()
                    raise httplib.IncompleteRead(value)
                if chunk_left == 0:
                    # a zero-length chunk terminates the body
                    break
            if amt is None:
                value += self._safe_read(chunk_left)
            elif amt < chunk_left:
                # partial chunk satisfies the request; remember the rest
                value += self._safe_read(amt)
                self.chunk_left = chunk_left - amt
                return value
            elif amt == chunk_left:
                value += self._safe_read(amt)
                self._safe_read(2)  # toss the CRLF at the end of the chunk
                self.chunk_left = None
                return value
            else:
                value += self._safe_read(chunk_left)
                amt -= chunk_left

            # we read the whole chunk, get another
            self._safe_read(2)      # toss the CRLF at the end of the chunk
            chunk_left = None

        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline()
            if not line:
                # a vanishingly small number of sites EOF without
                # sending the trailer
                break
            if line == '\r\n':
                break

        # we read everything; close the "file"
        self.close()

        return value
485
485
    def readline(self, limit=-1):
        """Return the next line (at most limit bytes when limit > 0),
        buffering raw reads in self._rbuf.  Adapted, like readlines(),
        from socket.py with almost no modification."""
        i = self._rbuf.find('\n')
        while i < 0 and not (0 < limit <= len(self._rbuf)):
            # no newline buffered yet and limit not reached: pull in
            # another block from the connection
            new = self._raw_read(self._rbufsize)
            if not new:
                break
            i = new.find('\n')
            if i >= 0:
                # offset of the newline within the combined buffer
                i = i + len(self._rbuf)
            self._rbuf = self._rbuf + new
        if i < 0:
            # EOF without a newline: return whatever is buffered
            i = len(self._rbuf)
        else:
            # include the newline itself
            i = i + 1
        if 0 <= limit < len(self._rbuf):
            i = limit
        data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
        return data
504
504
505 def readlines(self, sizehint = 0):
505 def readlines(self, sizehint = 0):
506 total = 0
506 total = 0
507 list = []
507 list = []
508 while True:
508 while True:
509 line = self.readline()
509 line = self.readline()
510 if not line:
510 if not line:
511 break
511 break
512 list.append(line)
512 list.append(line)
513 total += len(line)
513 total += len(line)
514 if sizehint and total >= sizehint:
514 if sizehint and total >= sizehint:
515 break
515 break
516 return list
516 return list
517
517
518 def safesend(self, str):
518 def safesend(self, str):
519 """Send `str' to the server.
519 """Send `str' to the server.
520
520
521 Shamelessly ripped off from httplib to patch a bad behavior.
521 Shamelessly ripped off from httplib to patch a bad behavior.
522 """
522 """
523 # _broken_pipe_resp is an attribute we set in this function
523 # _broken_pipe_resp is an attribute we set in this function
524 # if the socket is closed while we're sending data but
524 # if the socket is closed while we're sending data but
525 # the server sent us a response before hanging up.
525 # the server sent us a response before hanging up.
526 # In that case, we want to pretend to send the rest of the
526 # In that case, we want to pretend to send the rest of the
527 # outgoing data, and then let the user use getresponse()
527 # outgoing data, and then let the user use getresponse()
528 # (which we wrap) to get this last response before
528 # (which we wrap) to get this last response before
529 # opening a new socket.
529 # opening a new socket.
530 if getattr(self, '_broken_pipe_resp', None) is not None:
530 if getattr(self, '_broken_pipe_resp', None) is not None:
531 return
531 return
532
532
533 if self.sock is None:
533 if self.sock is None:
534 if self.auto_open:
534 if self.auto_open:
535 self.connect()
535 self.connect()
536 else:
536 else:
537 raise httplib.NotConnected
537 raise httplib.NotConnected
538
538
539 # send the data to the server. if we get a broken pipe, then close
539 # send the data to the server. if we get a broken pipe, then close
540 # the socket. we want to reconnect when somebody tries to send again.
540 # the socket. we want to reconnect when somebody tries to send again.
541 #
541 #
542 # NOTE: we DO propagate the error, though, because we cannot simply
542 # NOTE: we DO propagate the error, though, because we cannot simply
543 # ignore the error... the caller will know if they can retry.
543 # ignore the error... the caller will know if they can retry.
544 if self.debuglevel > 0:
544 if self.debuglevel > 0:
545 print "send:", repr(str)
545 print "send:", repr(str)
546 try:
546 try:
547 blocksize = 8192
547 blocksize = 8192
548 read = getattr(str, 'read', None)
548 read = getattr(str, 'read', None)
549 if read is not None:
549 if read is not None:
550 if self.debuglevel > 0:
550 if self.debuglevel > 0:
551 print "sendIng a read()able"
551 print "sendIng a read()able"
552 data = read(blocksize)
552 data = read(blocksize)
553 while data:
553 while data:
554 self.sock.sendall(data)
554 self.sock.sendall(data)
555 data = read(blocksize)
555 data = read(blocksize)
556 else:
556 else:
557 self.sock.sendall(str)
557 self.sock.sendall(str)
558 except socket.error, v:
558 except socket.error, v:
559 reraise = True
559 reraise = True
560 if v[0] == errno.EPIPE: # Broken pipe
560 if v[0] == errno.EPIPE: # Broken pipe
561 if self._HTTPConnection__state == httplib._CS_REQ_SENT:
561 if self._HTTPConnection__state == httplib._CS_REQ_SENT:
562 self._broken_pipe_resp = None
562 self._broken_pipe_resp = None
563 self._broken_pipe_resp = self.getresponse()
563 self._broken_pipe_resp = self.getresponse()
564 reraise = False
564 reraise = False
565 self.close()
565 self.close()
566 if reraise:
566 if reraise:
567 raise
567 raise
568
568
569 def wrapgetresponse(cls):
569 def wrapgetresponse(cls):
570 """Wraps getresponse in cls with a broken-pipe sane version.
570 """Wraps getresponse in cls with a broken-pipe sane version.
571 """
571 """
572 def safegetresponse(self):
572 def safegetresponse(self):
573 # In safesend() we might set the _broken_pipe_resp
573 # In safesend() we might set the _broken_pipe_resp
574 # attribute, in which case the socket has already
574 # attribute, in which case the socket has already
575 # been closed and we just need to give them the response
575 # been closed and we just need to give them the response
576 # back. Otherwise, we use the normal response path.
576 # back. Otherwise, we use the normal response path.
577 r = getattr(self, '_broken_pipe_resp', None)
577 r = getattr(self, '_broken_pipe_resp', None)
578 if r is not None:
578 if r is not None:
579 return r
579 return r
580 return cls.getresponse(self)
580 return cls.getresponse(self)
581 safegetresponse.__doc__ = cls.getresponse.__doc__
581 safegetresponse.__doc__ = cls.getresponse.__doc__
582 return safegetresponse
582 return safegetresponse
583
583
584 class HTTPConnection(httplib.HTTPConnection):
584 class HTTPConnection(httplib.HTTPConnection):
585 # use the modified response class
585 # use the modified response class
586 response_class = HTTPResponse
586 response_class = HTTPResponse
587 send = safesend
587 send = safesend
588 getresponse = wrapgetresponse(httplib.HTTPConnection)
588 getresponse = wrapgetresponse(httplib.HTTPConnection)
589
589
590
590
591 #########################################################################
591 #########################################################################
592 ##### TEST FUNCTIONS
592 ##### TEST FUNCTIONS
593 #########################################################################
593 #########################################################################
594
594
595 def error_handler(url):
595 def error_handler(url):
596 global HANDLE_ERRORS
596 global HANDLE_ERRORS
597 orig = HANDLE_ERRORS
597 orig = HANDLE_ERRORS
598 keepalive_handler = HTTPHandler()
598 keepalive_handler = HTTPHandler()
599 opener = urllib2.build_opener(keepalive_handler)
599 opener = urllib2.build_opener(keepalive_handler)
600 urllib2.install_opener(opener)
600 urllib2.install_opener(opener)
601 pos = {0: 'off', 1: 'on'}
601 pos = {0: 'off', 1: 'on'}
602 for i in (0, 1):
602 for i in (0, 1):
603 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
603 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
604 HANDLE_ERRORS = i
604 HANDLE_ERRORS = i
605 try:
605 try:
606 fo = urllib2.urlopen(url)
606 fo = urllib2.urlopen(url)
607 fo.read()
607 fo.read()
608 fo.close()
608 fo.close()
609 try:
609 try:
610 status, reason = fo.status, fo.reason
610 status, reason = fo.status, fo.reason
611 except AttributeError:
611 except AttributeError:
612 status, reason = None, None
612 status, reason = None, None
613 except IOError, e:
613 except IOError, e:
614 print " EXCEPTION: %s" % e
614 print " EXCEPTION: %s" % e
615 raise
615 raise
616 else:
616 else:
617 print " status = %s, reason = %s" % (status, reason)
617 print " status = %s, reason = %s" % (status, reason)
618 HANDLE_ERRORS = orig
618 HANDLE_ERRORS = orig
619 hosts = keepalive_handler.open_connections()
619 hosts = keepalive_handler.open_connections()
620 print "open connections:", hosts
620 print "open connections:", hosts
621 keepalive_handler.close_all()
621 keepalive_handler.close_all()
622
622
623 def md5(s):
623 def md5(s):
624 try:
624 try:
625 from hashlib import md5 as _md5
625 from hashlib import md5 as _md5
626 except ImportError:
626 except ImportError:
627 from md5 import md5 as _md5
627 from md5 import md5 as _md5
628 global md5
628 global md5
629 md5 = _md5
629 md5 = _md5
630 return _md5(s)
630 return _md5(s)
631
631
632 def continuity(url):
632 def continuity(url):
633 format = '%25s: %s'
633 format = '%25s: %s'
634
634
635 # first fetch the file with the normal http handler
635 # first fetch the file with the normal http handler
636 opener = urllib2.build_opener()
636 opener = urllib2.build_opener()
637 urllib2.install_opener(opener)
637 urllib2.install_opener(opener)
638 fo = urllib2.urlopen(url)
638 fo = urllib2.urlopen(url)
639 foo = fo.read()
639 foo = fo.read()
640 fo.close()
640 fo.close()
641 m = md5.new(foo)
641 m = md5.new(foo)
642 print format % ('normal urllib', m.hexdigest())
642 print format % ('normal urllib', m.hexdigest())
643
643
644 # now install the keepalive handler and try again
644 # now install the keepalive handler and try again
645 opener = urllib2.build_opener(HTTPHandler())
645 opener = urllib2.build_opener(HTTPHandler())
646 urllib2.install_opener(opener)
646 urllib2.install_opener(opener)
647
647
648 fo = urllib2.urlopen(url)
648 fo = urllib2.urlopen(url)
649 foo = fo.read()
649 foo = fo.read()
650 fo.close()
650 fo.close()
651 m = md5.new(foo)
651 m = md5.new(foo)
652 print format % ('keepalive read', m.hexdigest())
652 print format % ('keepalive read', m.hexdigest())
653
653
654 fo = urllib2.urlopen(url)
654 fo = urllib2.urlopen(url)
655 foo = ''
655 foo = ''
656 while True:
656 while True:
657 f = fo.readline()
657 f = fo.readline()
658 if f:
658 if f:
659 foo = foo + f
659 foo = foo + f
660 else: break
660 else: break
661 fo.close()
661 fo.close()
662 m = md5.new(foo)
662 m = md5.new(foo)
663 print format % ('keepalive readline', m.hexdigest())
663 print format % ('keepalive readline', m.hexdigest())
664
664
665 def comp(N, url):
665 def comp(N, url):
666 print ' making %i connections to:\n %s' % (N, url)
666 print ' making %i connections to:\n %s' % (N, url)
667
667
668 sys.stdout.write(' first using the normal urllib handlers')
668 sys.stdout.write(' first using the normal urllib handlers')
669 # first use normal opener
669 # first use normal opener
670 opener = urllib2.build_opener()
670 opener = urllib2.build_opener()
671 urllib2.install_opener(opener)
671 urllib2.install_opener(opener)
672 t1 = fetch(N, url)
672 t1 = fetch(N, url)
673 print ' TIME: %.3f s' % t1
673 print ' TIME: %.3f s' % t1
674
674
675 sys.stdout.write(' now using the keepalive handler ')
675 sys.stdout.write(' now using the keepalive handler ')
676 # now install the keepalive handler and try again
676 # now install the keepalive handler and try again
677 opener = urllib2.build_opener(HTTPHandler())
677 opener = urllib2.build_opener(HTTPHandler())
678 urllib2.install_opener(opener)
678 urllib2.install_opener(opener)
679 t2 = fetch(N, url)
679 t2 = fetch(N, url)
680 print ' TIME: %.3f s' % t2
680 print ' TIME: %.3f s' % t2
681 print ' improvement factor: %.2f' % (t1 / t2)
681 print ' improvement factor: %.2f' % (t1 / t2)
682
682
683 def fetch(N, url, delay=0):
683 def fetch(N, url, delay=0):
684 import time
684 import time
685 lens = []
685 lens = []
686 starttime = time.time()
686 starttime = time.time()
687 for i in range(N):
687 for i in range(N):
688 if delay and i > 0:
688 if delay and i > 0:
689 time.sleep(delay)
689 time.sleep(delay)
690 fo = urllib2.urlopen(url)
690 fo = urllib2.urlopen(url)
691 foo = fo.read()
691 foo = fo.read()
692 fo.close()
692 fo.close()
693 lens.append(len(foo))
693 lens.append(len(foo))
694 diff = time.time() - starttime
694 diff = time.time() - starttime
695
695
696 j = 0
696 j = 0
697 for i in lens[1:]:
697 for i in lens[1:]:
698 j = j + 1
698 j = j + 1
699 if not i == lens[0]:
699 if not i == lens[0]:
700 print "WARNING: inconsistent length on read %i: %i" % (j, i)
700 print "WARNING: inconsistent length on read %i: %i" % (j, i)
701
701
702 return diff
702 return diff
703
703
704 def test_timeout(url):
704 def test_timeout(url):
705 global DEBUG
705 global DEBUG
706 dbbackup = DEBUG
706 dbbackup = DEBUG
707 class FakeLogger(object):
707 class FakeLogger(object):
708 def debug(self, msg, *args):
708 def debug(self, msg, *args):
709 print msg % args
709 print msg % args
710 info = warning = error = debug
710 info = warning = error = debug
711 DEBUG = FakeLogger()
711 DEBUG = FakeLogger()
712 print " fetching the file to establish a connection"
712 print " fetching the file to establish a connection"
713 fo = urllib2.urlopen(url)
713 fo = urllib2.urlopen(url)
714 data1 = fo.read()
714 data1 = fo.read()
715 fo.close()
715 fo.close()
716
716
717 i = 20
717 i = 20
718 print " waiting %i seconds for the server to close the connection" % i
718 print " waiting %i seconds for the server to close the connection" % i
719 while i > 0:
719 while i > 0:
720 sys.stdout.write('\r %2i' % i)
720 sys.stdout.write('\r %2i' % i)
721 sys.stdout.flush()
721 sys.stdout.flush()
722 time.sleep(1)
722 time.sleep(1)
723 i -= 1
723 i -= 1
724 sys.stderr.write('\r')
724 sys.stderr.write('\r')
725
725
726 print " fetching the file a second time"
726 print " fetching the file a second time"
727 fo = urllib2.urlopen(url)
727 fo = urllib2.urlopen(url)
728 data2 = fo.read()
728 data2 = fo.read()
729 fo.close()
729 fo.close()
730
730
731 if data1 == data2:
731 if data1 == data2:
732 print ' data are identical'
732 print ' data are identical'
733 else:
733 else:
734 print ' ERROR: DATA DIFFER'
734 print ' ERROR: DATA DIFFER'
735
735
736 DEBUG = dbbackup
736 DEBUG = dbbackup
737
737
738
738
739 def test(url, N=10):
739 def test(url, N=10):
740 print "checking error hander (do this on a non-200)"
740 print "checking error hander (do this on a non-200)"
741 try: error_handler(url)
741 try: error_handler(url)
742 except IOError:
742 except IOError:
743 print "exiting - exception will prevent further tests"
743 print "exiting - exception will prevent further tests"
744 sys.exit()
744 sys.exit()
745 print
745 print
746 print "performing continuity test (making sure stuff isn't corrupted)"
746 print "performing continuity test (making sure stuff isn't corrupted)"
747 continuity(url)
747 continuity(url)
748 print
748 print
749 print "performing speed comparison"
749 print "performing speed comparison"
750 comp(N, url)
750 comp(N, url)
751 print
751 print
752 print "performing dropped-connection check"
752 print "performing dropped-connection check"
753 test_timeout(url)
753 test_timeout(url)
754
754
755 if __name__ == '__main__':
755 if __name__ == '__main__':
756 import time
756 import time
757 import sys
757 import sys
758 try:
758 try:
759 N = int(sys.argv[1])
759 N = int(sys.argv[1])
760 url = sys.argv[2]
760 url = sys.argv[2]
761 except:
761 except (IndexError, ValueError):
762 print "%s <integer> <url>" % sys.argv[0]
762 print "%s <integer> <url>" % sys.argv[0]
763 else:
763 else:
764 test(url, N)
764 test(url, N)
@@ -1,233 +1,236 b''
1 # sshrepo.py - ssh repository proxy class for mercurial
1 # sshrepo.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 from i18n import _
9 from i18n import _
10 import util, error, wireproto
10 import util, error, wireproto
11
11
12 class remotelock(object):
12 class remotelock(object):
13 def __init__(self, repo):
13 def __init__(self, repo):
14 self.repo = repo
14 self.repo = repo
15 def release(self):
15 def release(self):
16 self.repo.unlock()
16 self.repo.unlock()
17 self.repo = None
17 self.repo = None
18 def __del__(self):
18 def __del__(self):
19 if self.repo:
19 if self.repo:
20 self.release()
20 self.release()
21
21
22 def _serverquote(s):
22 def _serverquote(s):
23 '''quote a string for the remote shell ... which we assume is sh'''
23 '''quote a string for the remote shell ... which we assume is sh'''
24 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
24 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
25 return s
25 return s
26 return "'%s'" % s.replace("'", "'\\''")
26 return "'%s'" % s.replace("'", "'\\''")
27
27
28 class sshrepository(wireproto.wirerepository):
28 class sshrepository(wireproto.wirerepository):
29 def __init__(self, ui, path, create=False):
29 def __init__(self, ui, path, create=False):
30 self._url = path
30 self._url = path
31 self.ui = ui
31 self.ui = ui
32 self.pipeo = self.pipei = self.pipee = None
32
33
33 u = util.url(path, parsequery=False, parsefragment=False)
34 u = util.url(path, parsequery=False, parsefragment=False)
34 if u.scheme != 'ssh' or not u.host or u.path is None:
35 if u.scheme != 'ssh' or not u.host or u.path is None:
35 self._abort(error.RepoError(_("couldn't parse location %s") % path))
36 self._abort(error.RepoError(_("couldn't parse location %s") % path))
36
37
37 self.user = u.user
38 self.user = u.user
38 if u.passwd is not None:
39 if u.passwd is not None:
39 self._abort(error.RepoError(_("password in URL not supported")))
40 self._abort(error.RepoError(_("password in URL not supported")))
40 self.host = u.host
41 self.host = u.host
41 self.port = u.port
42 self.port = u.port
42 self.path = u.path or "."
43 self.path = u.path or "."
43
44
44 sshcmd = self.ui.config("ui", "ssh", "ssh")
45 sshcmd = self.ui.config("ui", "ssh", "ssh")
45 remotecmd = self.ui.config("ui", "remotecmd", "hg")
46 remotecmd = self.ui.config("ui", "remotecmd", "hg")
46
47
47 args = util.sshargs(sshcmd, self.host, self.user, self.port)
48 args = util.sshargs(sshcmd, self.host, self.user, self.port)
48
49
49 if create:
50 if create:
50 cmd = '%s %s %s' % (sshcmd, args,
51 cmd = '%s %s %s' % (sshcmd, args,
51 util.shellquote("%s init %s" %
52 util.shellquote("%s init %s" %
52 (_serverquote(remotecmd), _serverquote(self.path))))
53 (_serverquote(remotecmd), _serverquote(self.path))))
53 ui.note(_('running %s\n') % cmd)
54 ui.note(_('running %s\n') % cmd)
54 res = util.system(cmd)
55 res = util.system(cmd)
55 if res != 0:
56 if res != 0:
56 self._abort(error.RepoError(_("could not create remote repo")))
57 self._abort(error.RepoError(_("could not create remote repo")))
57
58
58 self.validate_repo(ui, sshcmd, args, remotecmd)
59 self.validate_repo(ui, sshcmd, args, remotecmd)
59
60
60 def url(self):
61 def url(self):
61 return self._url
62 return self._url
62
63
63 def validate_repo(self, ui, sshcmd, args, remotecmd):
64 def validate_repo(self, ui, sshcmd, args, remotecmd):
64 # cleanup up previous run
65 # cleanup up previous run
65 self.cleanup()
66 self.cleanup()
66
67
67 cmd = '%s %s %s' % (sshcmd, args,
68 cmd = '%s %s %s' % (sshcmd, args,
68 util.shellquote("%s -R %s serve --stdio" %
69 util.shellquote("%s -R %s serve --stdio" %
69 (_serverquote(remotecmd), _serverquote(self.path))))
70 (_serverquote(remotecmd), _serverquote(self.path))))
70 ui.note(_('running %s\n') % cmd)
71 ui.note(_('running %s\n') % cmd)
71 cmd = util.quotecommand(cmd)
72 cmd = util.quotecommand(cmd)
72 self.pipeo, self.pipei, self.pipee = util.popen3(cmd)
73 self.pipeo, self.pipei, self.pipee = util.popen3(cmd)
73
74
74 # skip any noise generated by remote shell
75 # skip any noise generated by remote shell
75 self._callstream("hello")
76 self._callstream("hello")
76 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
77 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
77 lines = ["", "dummy"]
78 lines = ["", "dummy"]
78 max_noise = 500
79 max_noise = 500
79 while lines[-1] and max_noise:
80 while lines[-1] and max_noise:
80 l = r.readline()
81 l = r.readline()
81 self.readerr()
82 self.readerr()
82 if lines[-1] == "1\n" and l == "\n":
83 if lines[-1] == "1\n" and l == "\n":
83 break
84 break
84 if l:
85 if l:
85 ui.debug("remote: ", l)
86 ui.debug("remote: ", l)
86 lines.append(l)
87 lines.append(l)
87 max_noise -= 1
88 max_noise -= 1
88 else:
89 else:
89 self._abort(error.RepoError(_('no suitable response from '
90 self._abort(error.RepoError(_('no suitable response from '
90 'remote hg')))
91 'remote hg')))
91
92
92 self.capabilities = set()
93 self.capabilities = set()
93 for l in reversed(lines):
94 for l in reversed(lines):
94 if l.startswith("capabilities:"):
95 if l.startswith("capabilities:"):
95 self.capabilities.update(l[:-1].split(":")[1].split())
96 self.capabilities.update(l[:-1].split(":")[1].split())
96 break
97 break
97
98
98 def readerr(self):
99 def readerr(self):
99 while True:
100 while True:
100 size = util.fstat(self.pipee).st_size
101 size = util.fstat(self.pipee).st_size
101 if size == 0:
102 if size == 0:
102 break
103 break
103 s = self.pipee.read(size)
104 s = self.pipee.read(size)
104 if not s:
105 if not s:
105 break
106 break
106 for l in s.splitlines():
107 for l in s.splitlines():
107 self.ui.status(_("remote: "), l, '\n')
108 self.ui.status(_("remote: "), l, '\n')
108
109
109 def _abort(self, exception):
110 def _abort(self, exception):
110 self.cleanup()
111 self.cleanup()
111 raise exception
112 raise exception
112
113
113 def cleanup(self):
114 def cleanup(self):
115 if self.pipeo is None:
116 return
117 self.pipeo.close()
118 self.pipei.close()
114 try:
119 try:
115 self.pipeo.close()
116 self.pipei.close()
117 # read the error descriptor until EOF
120 # read the error descriptor until EOF
118 for l in self.pipee:
121 for l in self.pipee:
119 self.ui.status(_("remote: "), l)
122 self.ui.status(_("remote: "), l)
120 self.pipee.close()
123 except (IOError, ValueError):
121 except:
122 pass
124 pass
125 self.pipee.close()
123
126
124 __del__ = cleanup
127 __del__ = cleanup
125
128
126 def _callstream(self, cmd, **args):
129 def _callstream(self, cmd, **args):
127 self.ui.debug("sending %s command\n" % cmd)
130 self.ui.debug("sending %s command\n" % cmd)
128 self.pipeo.write("%s\n" % cmd)
131 self.pipeo.write("%s\n" % cmd)
129 _func, names = wireproto.commands[cmd]
132 _func, names = wireproto.commands[cmd]
130 keys = names.split()
133 keys = names.split()
131 wireargs = {}
134 wireargs = {}
132 for k in keys:
135 for k in keys:
133 if k == '*':
136 if k == '*':
134 wireargs['*'] = args
137 wireargs['*'] = args
135 break
138 break
136 else:
139 else:
137 wireargs[k] = args[k]
140 wireargs[k] = args[k]
138 del args[k]
141 del args[k]
139 for k, v in sorted(wireargs.iteritems()):
142 for k, v in sorted(wireargs.iteritems()):
140 self.pipeo.write("%s %d\n" % (k, len(v)))
143 self.pipeo.write("%s %d\n" % (k, len(v)))
141 if isinstance(v, dict):
144 if isinstance(v, dict):
142 for dk, dv in v.iteritems():
145 for dk, dv in v.iteritems():
143 self.pipeo.write("%s %d\n" % (dk, len(dv)))
146 self.pipeo.write("%s %d\n" % (dk, len(dv)))
144 self.pipeo.write(dv)
147 self.pipeo.write(dv)
145 else:
148 else:
146 self.pipeo.write(v)
149 self.pipeo.write(v)
147 self.pipeo.flush()
150 self.pipeo.flush()
148
151
149 return self.pipei
152 return self.pipei
150
153
151 def _call(self, cmd, **args):
154 def _call(self, cmd, **args):
152 self._callstream(cmd, **args)
155 self._callstream(cmd, **args)
153 return self._recv()
156 return self._recv()
154
157
155 def _callpush(self, cmd, fp, **args):
158 def _callpush(self, cmd, fp, **args):
156 r = self._call(cmd, **args)
159 r = self._call(cmd, **args)
157 if r:
160 if r:
158 return '', r
161 return '', r
159 while True:
162 while True:
160 d = fp.read(4096)
163 d = fp.read(4096)
161 if not d:
164 if not d:
162 break
165 break
163 self._send(d)
166 self._send(d)
164 self._send("", flush=True)
167 self._send("", flush=True)
165 r = self._recv()
168 r = self._recv()
166 if r:
169 if r:
167 return '', r
170 return '', r
168 return self._recv(), ''
171 return self._recv(), ''
169
172
170 def _decompress(self, stream):
173 def _decompress(self, stream):
171 return stream
174 return stream
172
175
173 def _recv(self):
176 def _recv(self):
174 l = self.pipei.readline()
177 l = self.pipei.readline()
175 if l == '\n':
178 if l == '\n':
176 err = []
179 err = []
177 while True:
180 while True:
178 line = self.pipee.readline()
181 line = self.pipee.readline()
179 if line == '-\n':
182 if line == '-\n':
180 break
183 break
181 err.extend([line])
184 err.extend([line])
182 if len(err) > 0:
185 if len(err) > 0:
183 # strip the trailing newline added to the last line server-side
186 # strip the trailing newline added to the last line server-side
184 err[-1] = err[-1][:-1]
187 err[-1] = err[-1][:-1]
185 self._abort(error.OutOfBandError(*err))
188 self._abort(error.OutOfBandError(*err))
186 self.readerr()
189 self.readerr()
187 try:
190 try:
188 l = int(l)
191 l = int(l)
189 except ValueError:
192 except ValueError:
190 self._abort(error.ResponseError(_("unexpected response:"), l))
193 self._abort(error.ResponseError(_("unexpected response:"), l))
191 return self.pipei.read(l)
194 return self.pipei.read(l)
192
195
193 def _send(self, data, flush=False):
196 def _send(self, data, flush=False):
194 self.pipeo.write("%d\n" % len(data))
197 self.pipeo.write("%d\n" % len(data))
195 if data:
198 if data:
196 self.pipeo.write(data)
199 self.pipeo.write(data)
197 if flush:
200 if flush:
198 self.pipeo.flush()
201 self.pipeo.flush()
199 self.readerr()
202 self.readerr()
200
203
201 def lock(self):
204 def lock(self):
202 self._call("lock")
205 self._call("lock")
203 return remotelock(self)
206 return remotelock(self)
204
207
205 def unlock(self):
208 def unlock(self):
206 self._call("unlock")
209 self._call("unlock")
207
210
208 def addchangegroup(self, cg, source, url, lock=None):
211 def addchangegroup(self, cg, source, url, lock=None):
209 '''Send a changegroup to the remote server. Return an integer
212 '''Send a changegroup to the remote server. Return an integer
210 similar to unbundle(). DEPRECATED, since it requires locking the
213 similar to unbundle(). DEPRECATED, since it requires locking the
211 remote.'''
214 remote.'''
212 d = self._call("addchangegroup")
215 d = self._call("addchangegroup")
213 if d:
216 if d:
214 self._abort(error.RepoError(_("push refused: %s") % d))
217 self._abort(error.RepoError(_("push refused: %s") % d))
215 while True:
218 while True:
216 d = cg.read(4096)
219 d = cg.read(4096)
217 if not d:
220 if not d:
218 break
221 break
219 self.pipeo.write(d)
222 self.pipeo.write(d)
220 self.readerr()
223 self.readerr()
221
224
222 self.pipeo.flush()
225 self.pipeo.flush()
223
226
224 self.readerr()
227 self.readerr()
225 r = self._recv()
228 r = self._recv()
226 if not r:
229 if not r:
227 return 1
230 return 1
228 try:
231 try:
229 return int(r)
232 return int(r)
230 except ValueError:
233 except ValueError:
231 self._abort(error.ResponseError(_("unexpected response:"), r))
234 self._abort(error.ResponseError(_("unexpected response:"), r))
232
235
233 instance = sshrepository
236 instance = sshrepository
@@ -1,1766 +1,1766 b''
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, datetime, calendar, textwrap, signal
19 import os, time, datetime, calendar, textwrap, signal
20 import imp, socket, urllib
20 import imp, socket, urllib
21
21
22 if os.name == 'nt':
22 if os.name == 'nt':
23 import windows as platform
23 import windows as platform
24 else:
24 else:
25 import posix as platform
25 import posix as platform
26
26
# Hand the codec helpers to the platform module so its case-folding
# routines can use the repository-wide encoding configuration.
platform.encodinglower = encoding.lower
platform.encodingupper = encoding.upper

# Re-export the platform-specific implementations under stable names so
# the rest of Mercurial can simply call util.<name> regardless of OS.
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
nulldev = platform.nulldev
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
realpath = platform.realpath
rename = platform.rename
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
sshargs = platform.sshargs
statfiles = platform.statfiles
termwidth = platform.termwidth
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
unlinkpath = platform.unlinkpath
username = platform.username
77
77
# Python compatibility

# unique sentinel object: lets getattr() distinguish "attribute absent"
# from an attribute whose value happens to be None/False
_notset = object()

def safehasattr(thing, attr):
    """Return True if `thing` really has attribute `attr`.

    Unlike the hasattr builtin (which on older Pythons swallows any
    exception raised during lookup), this treats only a genuinely
    missing attribute as False.
    """
    return getattr(thing, attr, _notset) is not _notset
84
84
def sha1(s=''):
    '''
    Low-overhead wrapper around Python's SHA support

    >>> f = _fastsha1
    >>> a = sha1()
    >>> a = f()
    >>> a.hexdigest()
    'da39a3ee5e6b4b0d3255bfef95601890afd80709'
    '''

    return _fastsha1(s)

def _fastsha1(s=''):
    # Bootstrap trampoline: on the first call, locate the best available
    # SHA-1 implementation and rebind BOTH module-level names straight to
    # it, so every later call skips this function entirely.
    if sys.version_info < (2, 5):
        from sha import sha as _sha1     # pre-hashlib Pythons
    else:
        from hashlib import sha1 as _sha1
    global _fastsha1, sha1
    _fastsha1 = sha1 = _sha1
    return _sha1(s)
109
109
try:
    buffer = buffer            # Python 2: keep the builtin
except NameError:
    # The buffer builtin is gone in Python 3; emulate the behaviour
    # Mercurial relies on (a cheap offset view over a sliceable).
    if sys.version_info[0] >= 3:
        def buffer(sliceable, offset=0):
            # zero-copy view over the underlying bytes
            return memoryview(sliceable)[offset:]
    else:
        def buffer(sliceable, offset=0):
            return sliceable[offset:]
119
119
import subprocess
# On Windows, inherited descriptors cannot be closed while redirecting
# the standard streams; everywhere else we close them defensively.
closefds = os.name == 'posix'

def popen2(cmd, env=None, newlines=False):
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout

def popen3(cmd, env=None, newlines=False):
    # Same as popen2(), but also captures the child's stderr.
    proc = subprocess.Popen(cmd,
                            shell=True,
                            bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr
142
142
def version():
    """Return version information if available."""
    try:
        # generated at build time; absent when running from a source tree
        import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
150
150
# used by parsedate
defaultdateformats = (
    # ISO-style and slash/dash numeric dates
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    # verbose forms as printed by date(1) and mail headers
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    # time-of-day only
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

# coarser formats accepted where a date *range* makes sense
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
185
185
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    memo = {}
    if func.func_code.co_argcount == 1:
        # single-argument functions key the cache on the bare argument,
        # avoiding a tuple pack/unpack on every hit
        def f(arg):
            try:
                return memo[arg]
            except KeyError:
                memo[arg] = func(arg)
                return memo[arg]
    else:
        def f(*args):
            try:
                return memo[args]
            except KeyError:
                memo[args] = func(*args)
                return memo[args]

    return f
204
204
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    memo = {}
    usage = []   # keys ordered least- to most-recently used; ~20 kept
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg in memo:
                usage.remove(arg)
            else:
                if len(memo) > 20:
                    # evict the stalest entry before inserting
                    del memo[usage.pop(0)]
                memo[arg] = func(arg)
            usage.append(arg)
            return memo[arg]
    else:
        def f(*args):
            if args in memo:
                usage.remove(args)
            else:
                if len(memo) > 20:
                    del memo[usage.pop(0)]
                memo[args] = func(*args)
            usage.append(args)
            return memo[args]

    return f
231
231
class propertycache(object):
    """Descriptor that memoizes a computed attribute on first access.

    The wrapped function runs once per instance; its result is stored on
    the instance under the same name, and - this being a non-data
    descriptor - that instance attribute shadows the descriptor on all
    later lookups.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        setattr(obj, self.name, value)
        return value
240
240
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    out, err = proc.communicate(s)
    return out
247
247
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # feed S to the command through a real file on disk
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        wfp = os.fdopen(infd, 'wb')
        wfp.write(s)
        wfp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # VMS reports success with the low bit set
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        rfp = open(outname, 'rb')
        data = rfp.read()
        rfp.close()
        return data
    finally:
        # best-effort cleanup of whichever temp files were created
        for name in (inname, outname):
            try:
                if name:
                    os.unlink(name)
            except OSError:
                pass
284
284
# maps a command prefix to the strategy used to run the filter command
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for prefix, runner in filtertable.iteritems():
        if cmd.startswith(prefix):
            return runner(s, cmd[len(prefix):].lstrip())
    # no explicit strategy prefix: default to a pipe
    return pipefilter(s, cmd)
296
296
def binary(s):
    """return true if a string is binary data"""
    # an embedded NUL byte is the (cheap) heuristic for binary content
    if not s:
        return False
    return '\0' in s
300
300
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def _floorlog2(x):
        # index of the highest set bit; 0 when x is 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    size = 0
    for chunk in source:
        pending.append(chunk)
        size += len(chunk)
        if size >= min:
            if min < max:
                # grow the threshold: at least double, or jump to the
                # largest power of two covered by what we just emitted
                min = min << 1
                nmin = 1 << _floorlog2(size)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(pending)
            size = 0
            pending = []
    if pending:
        yield ''.join(pending)
331
331
332 Abort = error.Abort
332 Abort = error.Abort
333
333
def always(fn):
    """Match-everything predicate (default file matcher)."""
    return True
336
336
def never(fn):
    """Match-nothing predicate (complement of always)."""
    return False
339
339
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives: no relative path exists, fall back to
            # an absolute one
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    comps1, comps2 = splitpath(n1), n2.split('/')
    comps1.reverse()
    comps2.reverse()
    # strip the common leading components (compared from the reversed
    # tails so pop() is cheap)
    while comps1 and comps2 and comps1[-1] == comps2[-1]:
        comps1.pop()
        comps2.pop()
    comps2.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join((['..'] * len(comps1)) + comps2) or '.'
365
365
# cached path of the 'hg' executable; filled in lazily by hgexecutable()
_hgexecutable = None

def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):       # new py2exe
        return True
    if safehasattr(sys, "importers"):    # old py2exe
        return True
    return imp.is_frozen("__main__")     # tools/freeze
377
377
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        # probe, in decreasing order of confidence: the HG environment
        # variable, a frozen interpreter, a script literally named 'hg',
        # then the search path / argv[0]
        mainmod = sys.modules['__main__']
        envhg = os.environ.get('HG')
        if envhg:
            _sethgexecutable(envhg)
        elif mainfrozen():
            _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            _sethgexecutable(mainmod.__file__)
        else:
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
396
396
def _sethgexecutable(path):
    """set location of the 'hg' executable

    Records *path* in the module-level cache consulted by
    hgexecutable(); passing None forces re-detection on the next call.
    """
    global _hgexecutable
    _hgexecutable = path
401
401
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    try:
        # keep our own buffered output ordered w.r.t. the child's
        sys.stdout.flush()
    except Exception:
        pass

    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)

    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9':
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if cwd is not None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or out == sys.__stdout__:
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # caller wants the child's output: merge stderr into stdout
            # and copy it line by line
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in proc.stdout:
                out.write(line)
            proc.wait()
            rc = proc.returncode
        if sys.platform == 'OpenVMS' and rc & 1:
            rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc
458
458
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a one-frame traceback means the call itself failed (bad
            # argument count), not something inside func
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
470
470
471 def copyfile(src, dest):
471 def copyfile(src, dest):
472 "copy a file, preserving mode and atime/mtime"
472 "copy a file, preserving mode and atime/mtime"
473 if os.path.islink(src):
473 if os.path.islink(src):
474 try:
474 try:
475 os.unlink(dest)
475 os.unlink(dest)
476 except OSError:
476 except OSError:
477 pass
477 pass
478 os.symlink(os.readlink(src), dest)
478 os.symlink(os.readlink(src), dest)
479 else:
479 else:
480 try:
480 try:
481 shutil.copyfile(src, dest)
481 shutil.copyfile(src, dest)
482 shutil.copymode(src, dest)
482 shutil.copymode(src, dest)
483 except shutil.Error, inst:
483 except shutil.Error, inst:
484 raise Abort(str(inst))
484 raise Abort(str(inst))
485
485
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # only attempt hardlinks when src and dst live on the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    num = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            # recurse; a failed link below disables hardlinking for the
            # rest of the tree
            hardlink, copied = copyfiles(srcname, dstname, hardlink)
            num += copied
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1

    return hardlink, num
513
513
# device names and characters that Windows refuses in path components
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'

def checkwinfilename(path):
    '''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    '''
    for part in path.replace('\\', '/').split('/'):
        if not part:
            continue
        for ch in part:
            if ch in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % ch
            if ord(ch) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % ch
        # only the part before the first dot can collide with a device
        stem = part.split('.')[0]
        if stem and stem.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % stem
        tail = part[-1]
        if tail in '. ' and part not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % tail
556
556
# on NT the Windows-specific validation above is the authoritative
# check; elsewhere defer to the platform module
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
561
561
def makelock(info, pathname):
    """Create a lock at pathname whose contents identify the holder.

    Prefer a symlink, which is atomic and carries info in its target;
    on platforms without os.symlink, fall back to exclusively creating
    a regular file containing info.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # a pre-existing lock must surface; any other symlink failure
        # falls through to the plain-file strategy
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    # O_EXCL makes this fail rather than clobber a live lock
    fd = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(fd, info)
    os.close(fd)
574
574
def readlock(pathname):
    """Return the holder info stored in the lock at pathname."""
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported --
        # in either case fall back to reading a plain lock file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    contents = fp.read()
    fp.close()
    return contents
587
587
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # no fileno(): fall back to stat()ing the file by its name
        return os.stat(fp.name)
594
594
595 # File system features
595 # File system features
596
596
def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component; the check stats a case-swapped spelling of
    that component and compares the results.
    """
    st = os.stat(path)
    dirname, basename = os.path.split(path)
    folded = basename.upper()
    if folded == basename:
        folded = basename.lower()
        if folded == basename:
            # nothing to fold: no evidence against case sensitivity
            return True
    try:
        st2 = os.stat(os.path.join(dirname, folded))
    except OSError:
        # folded spelling does not exist: case-sensitive
        return True
    # same inode under both spellings means case-insensitive
    return st2 != st
619
619
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def find(p, contents):
        # return the on-disk spelling of component p, or None
        for n in contents:
            if normcase(n) == p:
                return n
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # (bugfix: the replace() result used to be discarded, leaving '\\'
    # unescaped inside the character classes below on Windows)
    seps = seps.replace('\\', '\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        found = find(part, contents)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            contents = os.listdir(dir)
            _fspathcache[dir] = contents
            found = find(part, contents)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
666
666
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    probe = testfile + ".hgtmp1"
    if os.path.lexists(probe):
        return False
    try:
        posixfile(probe, 'w').close()
    except IOError:
        return False

    link = testfile + ".hgtmp2"
    fh = None
    try:
        try:
            oslink(probe, link)
        except OSError:
            return False

        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fh = posixfile(link)
        return nlinks(link) > 1
    finally:
        if fh is not None:
            fh.close()
        # best-effort removal of both scratch files
        for scratch in (probe, link):
            try:
                os.unlink(scratch)
            except OSError:
                pass
702
702
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    return os.altsep and path.endswith(os.altsep)
706
706
def splitpath(path):
    '''Split path by os.sep.

    os.altsep is deliberately ignored: this is just shorthand for
    "xxx.split(os.sep)". Run the path through os.path.normpath()
    first if that matters.'''
    return path.split(os.sep)
714
714
def gui():
    '''Are we running in a GUI?'''
    if sys.platform != 'darwin':
        # on other platforms: Windows always has one; elsewhere X11 does
        return os.name == "nt" or os.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in os.environ:
        # handle SSH access to a box where the user is logged in
        return False
    isgui = getattr(osutil, 'isgui', None)
    if isgui:
        # check if a CoreGraphics session is available
        return isgui()
    # pure build; use a safe default
    return True
729
729
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # nothing to copy; caller gets the empty temp file
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # deliberately bare: the temp file must be removed even on
        # KeyboardInterrupt/SystemExit. Narrow the cleanup handler to
        # OSError (was a naked "except: pass") so a secondary unlink
        # failure can't swallow unrelated exceptions.
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
768
768
class atomictempfile(object):
    '''writeable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        # skip the initial copy when the file will be truncated anyway
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # delegated methods
        self.write = self._fp.write
        self.fileno = self._fp.fileno

    def close(self):
        """Flush the temp copy and rename it over the permanent name."""
        if self._fp.closed:
            return
        self._fp.close()
        rename(self._tempname, localpath(self.__name))

    def discard(self):
        """Throw away all writes: remove the temp copy without renaming."""
        if self._fp.closed:
            return
        try:
            os.unlink(self._tempname)
        except OSError:
            pass
        self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
804
804
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            # already there: nothing to do, not even chmod
            return
        if exc.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # hit the filesystem root without finding a creatable parent
            raise
        makedirs(parent, mode)
        os.mkdir(name)
    if mode is not None:
        os.chmod(name, mode)
821
821
def readfile(path):
    """Return the entire contents of the file at path, as bytes."""
    f = open(path, 'rb')
    with f:
        return f.read()
828
828
def writefile(path, text):
    """Replace the contents of the file at path with text (bytes)."""
    f = open(path, 'wb')
    with f:
        f.write(text)
835
835
def appendfile(path, text):
    """Append text (bytes) to the file at path, creating it if absent."""
    f = open(path, 'ab')
    with f:
        f.write(text)
842
842
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # cap oversized (>1M) incoming chunks at 256k pieces so a
            # single huge chunk cannot dominate memory use downstream
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        remaining = l
        parts = []
        queue = self._queue
        while remaining > 0:
            if not queue:
                # refill: pull roughly 256k ahead from the iterator
                budget = 2**18
                for piece in self.iter:
                    queue.append(piece)
                    budget -= len(piece)
                    if budget <= 0:
                        break
                if not queue:
                    # iterator ran dry; return what we have
                    break

            piece = queue.pop(0)
            remaining -= len(piece)
            if remaining < 0:
                # took too much: push the surplus back onto the queue
                queue.insert(0, piece[remaining:])
                parts.append(piece[:remaining])
            else:
                parts.append(piece)

        return ''.join(parts)
890
890
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # a zero-byte request short-circuits without touching the file
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
911
911
def makedate():
    """Return the current time as a (unixtime, tzoffset) tuple."""
    now = time.time()
    if now < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % now, hint=hint)
    # seconds the local zone is away from UTC at this instant
    delta = (datetime.datetime.utcfromtimestamp(now) -
             datetime.datetime.fromtimestamp(now))
    tz = delta.days * 86400 + delta.seconds
    return now, tz
921
921
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format:
        # %1/%2 expand to the "+HH" and "MM" halves of the zone offset
        sign = "-" if tz > 0 else "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    try:
        when = time.gmtime(float(t) - tz)
    except ValueError:
        # time was out of range
        when = time.gmtime(sys.maxint)
    return time.strftime(format, when)
943
943
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    return datestr(date, format='%Y-%m-%d')
947
947
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    defaults maps field groups ("S", "M", ...) to (biased, today) value
    pairs used to complete partial dates; it is only read, never mutated.
    """
    def gettimezone(s):
        # recognize a trailing numeric "+HHMM"/"-HHMM" zone, or the
        # literal GMT/UTC; return the offset in seconds, or None
        tz = s.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = 1 if tz[0] == "+" else -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz in ("GMT", "UTC"):
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset = gettimezone(string)
    date = string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # append defaults for any field the format does not mention
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        if any(("%" + p) in format for p in part):
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True
        else:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
988
988
def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        # fast path: the internal "unixtime offset" representation
        when, offset = map(int, date.split(' '))
    except ValueError:
        # build per-field defaults so partial dates can be completed
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # value used when rounding the specific end of unknowns
            rounded = bias.get(part)
            if rounded is None:
                rounded = "00" if part[0] in "HMS" else "0"
            # value matching the generic end to today's date
            today = datestr(now, "%" + part[0])
            defaults[part] = (rounded, today)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1042
1042
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # round every unspecified field down (Jan 1, 00:00:00)
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # round every unspecified field up, trying month lengths from
        # 31 down to 28 until one parses for the month in question
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))

    if date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when

    if date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when

    if date[0] == "-":
        # "-N": within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when

    if " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop

    # a plain date matches anything within its span of accuracy
    start, stop = lower(date), upper(date)
    return lambda x: x >= start and x <= stop
1118
1118
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop the domain, then any "Name <" prefix, then anything after
    # the first space or dot
    user = user.split('@', 1)[0]
    if '<' in user:
        user = user.split('<', 1)[1]
    user = user.split(' ', 1)[0]
    user = user.split('.', 1)[0]
    return user
1134
1134
def emailuser(user):
    """Return the user portion of an email address."""
    # strip the domain first, then any "Real Name <" prefix
    user = user.split('@', 1)[0]
    return user.split('<', 1)[-1]
1144
1144
def email(author):
    '''get email of author.'''
    # take everything between the first '<' and the first '>';
    # find() returning -1 conveniently means "from the start" for '<'
    start = author.find('<') + 1
    end = author.find('>')
    if end == -1:
        return author[start:]
    return author[start:end]
1151
1151
1152 def _ellipsis(text, maxlength):
1152 def _ellipsis(text, maxlength):
1153 if len(text) <= maxlength:
1153 if len(text) <= maxlength:
1154 return text, False
1154 return text, False
1155 else:
1155 else:
1156 return "%s..." % (text[:maxlength - 3]), True
1156 return "%s..." % (text[:maxlength - 3]), True
1157
1157
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # operate on unicode so trimming never splits a multi-byte
        # sequence in the local encoding
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if truncated:
            return utext.encode(encoding.encoding)
        return text
    except (UnicodeDecodeError, UnicodeEncodeError):
        # undecodable input: fall back to trimming raw bytes
        return _ellipsis(text, maxlength)[0]
1169
1169
# Lookup table for bytecount(): (multiplier, divisor, format string),
# ordered from the largest threshold to the smallest so the first
# matching row determines the unit and precision used.
_byteunits = (
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1182
1182
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''
    for multiplier, divisor, fmt in _byteunits:
        if nbytes >= divisor * multiplier:
            return fmt % (nbytes / float(divisor))
    # Values below every threshold (e.g. 0) fall through the loop and
    # use the smallest unit. The table is named _byteunits; the old
    # name 'units' used here was undefined and raised NameError.
    return _byteunits[-1][2] % nbytes
1190
1190
def uirepr(s):
    """Return repr(s) with doubled backslashes collapsed.

    Avoids double backslash in Windows path repr().
    """
    quoted = repr(s)
    return quoted.replace('\\\\', '\\')
1194
1194
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Factory returning a column-width-aware TextWrapper instance.

    The class is defined lazily inside this function (so textwrap is
    only needed once wrapping is actually used); on first call the
    factory rebinds the module-level name MBTextWrapper to the class
    itself, so later calls construct instances directly.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in east asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

            # for compatibility between 2.4 and 2.6
            if getattr(self, 'drop_whitespace', None) is None:
                self.drop_whitespace = kwargs.get('drop_whitespace', True)

        def _cutdown(self, ucstr, space_left):
            # Split ucstr at the longest prefix whose display width
            # (per encoding.ucolwidth) fits in space_left; returns
            # (prefix, remainder).
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            # Break a chunk too wide for any line, using column widths
            # instead of character counts.
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (ie. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # Rebind the module-level name to the class so this factory body
    # (and the class definition) runs only once.
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1305
1305
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap line to width columns, honoring the local encoding."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # wrap on unicode so column widths are computed per character
    uline = line.decode(encoding.encoding, encoding.encodingmode)
    uinit = initindent.decode(encoding.encoding, encoding.encodingmode)
    uhang = hangindent.decode(encoding.encoding, encoding.encodingmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=uinit,
                            subsequent_indent=uhang)
    return wrapper.fill(uline).encode(encoding.encoding)
1318
1318
def iterlines(iterator):
    """Yield each individual line from an iterable of text chunks."""
    for block in iterator:
        # splitlines() strips the line terminators for us
        for text in block.splitlines():
            yield text
1323
1323
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    expanded = os.path.expandvars(path)
    return os.path.expanduser(expanded)
1326
1326
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    # frozen builds: sys.executable is the command to re-run
    return [sys.executable]
1337
1337
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # Reap the child and record its pid. os.wait() returns a
        # (pid, status) tuple; only the pid is kept, otherwise the
        # 'pid in terminated' test below could never match.
        terminated.add(os.wait()[0])
    prevhandler = None
    # SIGCHLD does not exist on Windows; skip the handler there
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # re-check condfn() after noticing the child died, to
            # avoid racing between termination and the condition
            # becoming true
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore the previous SIGCHLD handler, if we installed ours
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1372
1372
# any() and all() are builtins from Python 2.5 on; on 2.4 the lookup
# raises NameError and pure-Python fallbacks are installed instead.
try:
    any, all = any, all
except NameError:
    def any(iterable):
        # True if at least one element of iterable is true
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        # True if every element of iterable is true (True for empty)
        for i in iterable:
            if not i:
                return False
        return True
1387
1387
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    if fn is None:
        fn = lambda s: s
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            # prefix arrives regex-escaped ('\X'); the raw character
            # to map is everything after the backslash
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # a doubled prefix stands for one literal prefix character
        mapping[prefix_char] = prefix_char
    matcher = re.compile(r'%s(%s)' % (prefix, patterns))
    def replace(match):
        # drop the one-character prefix, look up the key, apply fn
        return fn(mapping[match.group()[1:]])
    return matcher.sub(replace, s)
1412
1412
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall back to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(
                _("no port number associated with service '%s'") % port)
1429
1429
# recognized spellings of boolean configuration values
_booleans = {
    '1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
    '0': False, 'no': False, 'false': False, 'off': False, 'never': False,
}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # dict.get() already defaults to None for unknown spellings
    return _booleans.get(s.lower())
1440
1440
1441 _hexdig = '0123456789ABCDEFabcdef'
1441 _hexdig = '0123456789ABCDEFabcdef'
1442 _hextochr = dict((a + b, chr(int(a + b, 16)))
1442 _hextochr = dict((a + b, chr(int(a + b, 16)))
1443 for a in _hexdig for b in _hexdig)
1443 for a in _hexdig for b in _hexdig)
1444
1444
1445 def _urlunquote(s):
1445 def _urlunquote(s):
1446 """unquote('abc%20def') -> 'abc def'."""
1446 """unquote('abc%20def') -> 'abc def'."""
1447 res = s.split('%')
1447 res = s.split('%')
1448 # fastpath
1448 # fastpath
1449 if len(res) == 1:
1449 if len(res) == 1:
1450 return s
1450 return s
1451 s = res[0]
1451 s = res[0]
1452 for item in res[1:]:
1452 for item in res[1:]:
1453 try:
1453 try:
1454 s += _hextochr[item[:2]] + item[2:]
1454 s += _hextochr[item[:2]] + item[2:]
1455 except KeyError:
1455 except KeyError:
1456 s += '%' + item
1456 s += '%' + item
1457 except UnicodeDecodeError:
1457 except UnicodeDecodeError:
1458 s += unichr(int(item[:2], 16)) + item[2:]
1458 s += unichr(int(item[:2], 16)) + item[2:]
1459 return s
1459 return s
1460
1460
1461 class url(object):
1461 class url(object):
1462 r"""Reliable URL parser.
1462 r"""Reliable URL parser.
1463
1463
1464 This parses URLs and provides attributes for the following
1464 This parses URLs and provides attributes for the following
1465 components:
1465 components:
1466
1466
1467 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1467 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1468
1468
1469 Missing components are set to None. The only exception is
1469 Missing components are set to None. The only exception is
1470 fragment, which is set to '' if present but empty.
1470 fragment, which is set to '' if present but empty.
1471
1471
1472 If parsefragment is False, fragment is included in query. If
1472 If parsefragment is False, fragment is included in query. If
1473 parsequery is False, query is included in path. If both are
1473 parsequery is False, query is included in path. If both are
1474 False, both fragment and query are included in path.
1474 False, both fragment and query are included in path.
1475
1475
1476 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1476 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1477
1477
1478 Note that for backward compatibility reasons, bundle URLs do not
1478 Note that for backward compatibility reasons, bundle URLs do not
1479 take host names. That means 'bundle://../' has a path of '../'.
1479 take host names. That means 'bundle://../' has a path of '../'.
1480
1480
1481 Examples:
1481 Examples:
1482
1482
1483 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1483 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1484 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1484 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1485 >>> url('ssh://[::1]:2200//home/joe/repo')
1485 >>> url('ssh://[::1]:2200//home/joe/repo')
1486 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1486 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1487 >>> url('file:///home/joe/repo')
1487 >>> url('file:///home/joe/repo')
1488 <url scheme: 'file', path: '/home/joe/repo'>
1488 <url scheme: 'file', path: '/home/joe/repo'>
1489 >>> url('file:///c:/temp/foo/')
1489 >>> url('file:///c:/temp/foo/')
1490 <url scheme: 'file', path: 'c:/temp/foo/'>
1490 <url scheme: 'file', path: 'c:/temp/foo/'>
1491 >>> url('bundle:foo')
1491 >>> url('bundle:foo')
1492 <url scheme: 'bundle', path: 'foo'>
1492 <url scheme: 'bundle', path: 'foo'>
1493 >>> url('bundle://../foo')
1493 >>> url('bundle://../foo')
1494 <url scheme: 'bundle', path: '../foo'>
1494 <url scheme: 'bundle', path: '../foo'>
1495 >>> url(r'c:\foo\bar')
1495 >>> url(r'c:\foo\bar')
1496 <url path: 'c:\\foo\\bar'>
1496 <url path: 'c:\\foo\\bar'>
1497 >>> url(r'\\blah\blah\blah')
1497 >>> url(r'\\blah\blah\blah')
1498 <url path: '\\\\blah\\blah\\blah'>
1498 <url path: '\\\\blah\\blah\\blah'>
1499 >>> url(r'\\blah\blah\blah#baz')
1499 >>> url(r'\\blah\blah\blah#baz')
1500 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1500 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1501
1501
1502 Authentication credentials:
1502 Authentication credentials:
1503
1503
1504 >>> url('ssh://joe:xyz@x/repo')
1504 >>> url('ssh://joe:xyz@x/repo')
1505 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1505 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1506 >>> url('ssh://joe@x/repo')
1506 >>> url('ssh://joe@x/repo')
1507 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1507 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1508
1508
1509 Query strings and fragments:
1509 Query strings and fragments:
1510
1510
1511 >>> url('http://host/a?b#c')
1511 >>> url('http://host/a?b#c')
1512 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1512 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1513 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1513 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1514 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1514 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1515 """
1515 """
1516
1516
1517 _safechars = "!~*'()+"
1517 _safechars = "!~*'()+"
1518 _safepchars = "/!~*'()+:"
1518 _safepchars = "/!~*'()+:"
1519 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1519 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1520
1520
1521 def __init__(self, path, parsequery=True, parsefragment=True):
1521 def __init__(self, path, parsequery=True, parsefragment=True):
1522 # We slowly chomp away at path until we have only the path left
1522 # We slowly chomp away at path until we have only the path left
1523 self.scheme = self.user = self.passwd = self.host = None
1523 self.scheme = self.user = self.passwd = self.host = None
1524 self.port = self.path = self.query = self.fragment = None
1524 self.port = self.path = self.query = self.fragment = None
1525 self._localpath = True
1525 self._localpath = True
1526 self._hostport = ''
1526 self._hostport = ''
1527 self._origpath = path
1527 self._origpath = path
1528
1528
1529 if parsefragment and '#' in path:
1529 if parsefragment and '#' in path:
1530 path, self.fragment = path.split('#', 1)
1530 path, self.fragment = path.split('#', 1)
1531 if not path:
1531 if not path:
1532 path = None
1532 path = None
1533
1533
1534 # special case for Windows drive letters and UNC paths
1534 # special case for Windows drive letters and UNC paths
1535 if hasdriveletter(path) or path.startswith(r'\\'):
1535 if hasdriveletter(path) or path.startswith(r'\\'):
1536 self.path = path
1536 self.path = path
1537 return
1537 return
1538
1538
1539 # For compatibility reasons, we can't handle bundle paths as
1539 # For compatibility reasons, we can't handle bundle paths as
1540 # normal URLS
1540 # normal URLS
1541 if path.startswith('bundle:'):
1541 if path.startswith('bundle:'):
1542 self.scheme = 'bundle'
1542 self.scheme = 'bundle'
1543 path = path[7:]
1543 path = path[7:]
1544 if path.startswith('//'):
1544 if path.startswith('//'):
1545 path = path[2:]
1545 path = path[2:]
1546 self.path = path
1546 self.path = path
1547 return
1547 return
1548
1548
1549 if self._matchscheme(path):
1549 if self._matchscheme(path):
1550 parts = path.split(':', 1)
1550 parts = path.split(':', 1)
1551 if parts[0]:
1551 if parts[0]:
1552 self.scheme, path = parts
1552 self.scheme, path = parts
1553 self._localpath = False
1553 self._localpath = False
1554
1554
1555 if not path:
1555 if not path:
1556 path = None
1556 path = None
1557 if self._localpath:
1557 if self._localpath:
1558 self.path = ''
1558 self.path = ''
1559 return
1559 return
1560 else:
1560 else:
1561 if self._localpath:
1561 if self._localpath:
1562 self.path = path
1562 self.path = path
1563 return
1563 return
1564
1564
1565 if parsequery and '?' in path:
1565 if parsequery and '?' in path:
1566 path, self.query = path.split('?', 1)
1566 path, self.query = path.split('?', 1)
1567 if not path:
1567 if not path:
1568 path = None
1568 path = None
1569 if not self.query:
1569 if not self.query:
1570 self.query = None
1570 self.query = None
1571
1571
1572 # // is required to specify a host/authority
1572 # // is required to specify a host/authority
1573 if path and path.startswith('//'):
1573 if path and path.startswith('//'):
1574 parts = path[2:].split('/', 1)
1574 parts = path[2:].split('/', 1)
1575 if len(parts) > 1:
1575 if len(parts) > 1:
1576 self.host, path = parts
1576 self.host, path = parts
1577 path = path
1577 path = path
1578 else:
1578 else:
1579 self.host = parts[0]
1579 self.host = parts[0]
1580 path = None
1580 path = None
1581 if not self.host:
1581 if not self.host:
1582 self.host = None
1582 self.host = None
1583 # path of file:///d is /d
1583 # path of file:///d is /d
1584 # path of file:///d:/ is d:/, not /d:/
1584 # path of file:///d:/ is d:/, not /d:/
1585 if path and not hasdriveletter(path):
1585 if path and not hasdriveletter(path):
1586 path = '/' + path
1586 path = '/' + path
1587
1587
1588 if self.host and '@' in self.host:
1588 if self.host and '@' in self.host:
1589 self.user, self.host = self.host.rsplit('@', 1)
1589 self.user, self.host = self.host.rsplit('@', 1)
1590 if ':' in self.user:
1590 if ':' in self.user:
1591 self.user, self.passwd = self.user.split(':', 1)
1591 self.user, self.passwd = self.user.split(':', 1)
1592 if not self.host:
1592 if not self.host:
1593 self.host = None
1593 self.host = None
1594
1594
1595 # Don't split on colons in IPv6 addresses without ports
1595 # Don't split on colons in IPv6 addresses without ports
1596 if (self.host and ':' in self.host and
1596 if (self.host and ':' in self.host and
1597 not (self.host.startswith('[') and self.host.endswith(']'))):
1597 not (self.host.startswith('[') and self.host.endswith(']'))):
1598 self._hostport = self.host
1598 self._hostport = self.host
1599 self.host, self.port = self.host.rsplit(':', 1)
1599 self.host, self.port = self.host.rsplit(':', 1)
1600 if not self.host:
1600 if not self.host:
1601 self.host = None
1601 self.host = None
1602
1602
1603 if (self.host and self.scheme == 'file' and
1603 if (self.host and self.scheme == 'file' and
1604 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1604 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1605 raise Abort(_('file:// URLs can only refer to localhost'))
1605 raise Abort(_('file:// URLs can only refer to localhost'))
1606
1606
1607 self.path = path
1607 self.path = path
1608
1608
1609 # leave the query string escaped
1609 # leave the query string escaped
1610 for a in ('user', 'passwd', 'host', 'port',
1610 for a in ('user', 'passwd', 'host', 'port',
1611 'path', 'fragment'):
1611 'path', 'fragment'):
1612 v = getattr(self, a)
1612 v = getattr(self, a)
1613 if v is not None:
1613 if v is not None:
1614 setattr(self, a, _urlunquote(v))
1614 setattr(self, a, _urlunquote(v))
1615
1615
1616 def __repr__(self):
1616 def __repr__(self):
1617 attrs = []
1617 attrs = []
1618 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1618 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1619 'query', 'fragment'):
1619 'query', 'fragment'):
1620 v = getattr(self, a)
1620 v = getattr(self, a)
1621 if v is not None:
1621 if v is not None:
1622 attrs.append('%s: %r' % (a, v))
1622 attrs.append('%s: %r' % (a, v))
1623 return '<url %s>' % ', '.join(attrs)
1623 return '<url %s>' % ', '.join(attrs)
1624
1624
def __str__(self):
    r"""Join the URL's components back into a URL string.

    Examples:

    >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
    'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
    >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
    'http://user:pw@host:80/?foo=bar&baz=42'
    >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
    'http://user:pw@host:80/?foo=bar%3dbaz'
    >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
    'ssh://user:pw@[::1]:2200//home/joe#'
    >>> str(url('http://localhost:80//'))
    'http://localhost:80//'
    >>> str(url('http://localhost:80/'))
    'http://localhost:80/'
    >>> str(url('http://localhost:80'))
    'http://localhost:80/'
    >>> str(url('bundle:foo'))
    'bundle:foo'
    >>> str(url('bundle://../foo'))
    'bundle:../foo'
    >>> str(url('path'))
    'path'
    >>> str(url('file:///tmp/foo/bar'))
    'file:///tmp/foo/bar'
    >>> str(url('file:///c:/tmp/foo/bar'))
    'file:///c:/tmp/foo/bar'
    >>> print url(r'bundle:foo\bar')
    bundle:foo\bar
    """
    # Local paths (no parsed authority) are reassembled verbatim.
    if self._localpath:
        s = self.path
        if self.scheme == 'bundle':
            s = 'bundle:' + s
        if self.fragment:
            s += '#' + self.fragment
        return s

    s = self.scheme + ':'
    if self.user or self.passwd or self.host:
        s += '//'
    elif self.scheme and (not self.path or self.path.startswith('/')
                          or hasdriveletter(self.path)):
        # scheme-only or rooted paths still get the '//' authority marker;
        # drive-letter paths additionally get a leading '/'.
        s += '//'
        if hasdriveletter(self.path):
            s += '/'
    if self.user:
        s += urllib.quote(self.user, safe=self._safechars)
    if self.passwd:
        s += ':' + urllib.quote(self.passwd, safe=self._safechars)
    if self.user or self.passwd:
        s += '@'
    if self.host:
        # bracketed hosts are IPv6 literals and must not be percent-quoted
        if not (self.host.startswith('[') and self.host.endswith(']')):
            s += urllib.quote(self.host)
        else:
            s += self.host
    if self.port:
        s += ':' + urllib.quote(self.port)
    if self.host:
        s += '/'
    if self.path:
        # TODO: similar to the query string, we should not unescape the
        # path when we store it, the path might contain '%2f' = '/',
        # which we should *not* escape.
        s += urllib.quote(self.path, safe=self._safepchars)
    if self.query:
        # we store the query in escaped form.
        s += '?' + self.query
    # an empty-but-present fragment (trailing '#') is preserved
    if self.fragment is not None:
        s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
    return s
1699
1699
def authinfo(self):
    """Return (url_string_without_credentials, authinfo_or_None).

    The second element is suitable for feeding to a urllib2 password
    manager; it is None when the URL carries no user name.
    """
    user, passwd = self.user, self.passwd
    try:
        # temporarily strip the credentials so str(self) omits them
        self.user, self.passwd = None, None
        s = str(self)
    finally:
        self.user, self.passwd = user, passwd
    if not self.user:
        return (s, None)
    # authinfo[1] is passed to urllib2 password manager, and its
    # URIs must not contain credentials. The host is passed in the
    # URIs list because Python < 2.4.3 uses only that to search for
    # a password.
    return (s, (None, (s, self.host),
                self.user, self.passwd or ''))
1715
1715
def isabs(self):
    """Report whether this URL is absolute, i.e. cannot be joined()
    onto a base path."""
    if self.scheme and self.scheme != 'file':
        return True  # remote URL
    # Local cases: drive-letter path, Windows UNC path, or POSIX root.
    return bool(hasdriveletter(self.path)
                or self.path.startswith(r'\\')
                or self.path.startswith('/'))
1726
1726
def localpath(self):
    """Return the local filesystem path for file:/bundle: URLs;
    other schemes return the original unparsed URL string."""
    if self.scheme not in ('file', 'bundle'):
        return self._origpath
    path = self.path or '/'
    # For Windows, we need to promote hosts containing drive
    # letters to paths with drive letters.
    if hasdriveletter(self._hostport):
        path = self._hostport + '/' + self.path
    elif (self.host is not None and self.path
          and not hasdriveletter(path)):
        path = '/' + path
    return path
1739
1739
def hasscheme(path):
    """Return True when path parses with a URL scheme (http, ssh, ...)."""
    if url(path).scheme:
        return True
    return False
1742
1742
def hasdriveletter(path):
    """Detect a Windows drive-letter prefix such as 'c:'."""
    if not path:
        # preserve the falsy input itself ('' or None); callers only
        # test truthiness
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
1745
1745
def urllocalpath(path):
    """Parse path with query/fragment handling disabled and return the
    corresponding local filesystem path."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
1748
1748
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # mask rather than drop, so the URL shape stays recognizable
        parsed.passwd = '***'
    return str(parsed)
1755
1755
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = None
    parsed.passwd = None
    return str(parsed)
1761
1761
def isatty(fd):
    """Return whether fd is attached to a terminal; objects lacking an
    isatty() method are reported as non-ttys."""
    try:
        result = fd.isatty()
    except AttributeError:
        result = False
    return result
@@ -1,499 +1,499 b''
1 #
1 #
2 # This is the mercurial setup script.
2 # This is the mercurial setup script.
3 #
3 #
4 # 'python setup.py install', or
4 # 'python setup.py install', or
5 # 'python setup.py --help' for more options
5 # 'python setup.py --help' for more options
6
6
7 import sys, platform
7 import sys, platform
# Bail out early on interpreters too old to run Mercurial at all.
if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
    raise SystemExit("Mercurial requires Python 2.4 or later.")

if sys.version_info[0] < 3:
    def b(s):
        '''A helper function to emulate 2.6+ bytes literals using string
        literals.'''
        return s
else:
    def b(s):
        '''A helper function to emulate 2.6+ bytes literals using string
        literals.'''
        return s.encode('latin1')
21
21
# Solaris Python packaging brain damage
try:
    import hashlib
    sha = hashlib.sha1()
except ImportError:
    # fall back to the pre-2.5 sha module on broken installs
    try:
        import sha
    except ImportError:
        raise SystemExit(
            "Couldn't import standard hashlib (incomplete Python install).")

try:
    import zlib
except ImportError:
    raise SystemExit(
        "Couldn't import standard zlib (incomplete Python install).")

# The base IronPython distribution (as of 2.7.1) doesn't support bz2
isironpython = False
try:
    isironpython = (platform.python_implementation()
                    .lower().find("ironpython") != -1)
except AttributeError:
    # NOTE(review): presumably guards platform modules lacking
    # python_implementation() — confirm against supported Pythons
    pass

if isironpython:
    sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
else:
    try:
        import bz2
    except ImportError:
        raise SystemExit(
            "Couldn't import standard bz2 (incomplete Python install).")
55
55
56 import os, subprocess, time
56 import os, subprocess, time
57 import shutil
57 import shutil
58 import tempfile
58 import tempfile
59 from distutils import log
59 from distutils import log
60 from distutils.core import setup, Command, Extension
60 from distutils.core import setup, Command, Extension
61 from distutils.dist import Distribution
61 from distutils.dist import Distribution
62 from distutils.command.build import build
62 from distutils.command.build import build
63 from distutils.command.build_ext import build_ext
63 from distutils.command.build_ext import build_ext
64 from distutils.command.build_py import build_py
64 from distutils.command.build_py import build_py
65 from distutils.command.install_scripts import install_scripts
65 from distutils.command.install_scripts import install_scripts
66 from distutils.spawn import spawn, find_executable
66 from distutils.spawn import spawn, find_executable
67 from distutils.ccompiler import new_compiler
67 from distutils.ccompiler import new_compiler
68 from distutils.errors import CCompilerError, DistutilsExecError
68 from distutils.errors import CCompilerError, DistutilsExecError
69 from distutils.sysconfig import get_python_inc
69 from distutils.sysconfig import get_python_inc
70 from distutils.version import StrictVersion
70 from distutils.version import StrictVersion
71
71
# Optional, experimental 2to3 conversion: required to run setup.py on
# Python 3, rejected on Python 2 installs that lack lib2to3.
convert2to3 = '--c2to3' in sys.argv
if convert2to3:
    try:
        from distutils.command.build_py import build_py_2to3 as build_py
        from lib2to3.refactor import get_fixers_from_package as getfixers
    except ImportError:
        if sys.version_info[0] < 3:
            raise SystemExit("--c2to3 is only compatible with python3.")
        raise
    # contrib/ holds the hgfixes 2to3 fixer package
    sys.path.append('contrib')
elif sys.version_info[0] >= 3:
    raise SystemExit("setup.py with python3 needs --c2to3 (experimental)")
84
84
# Scripts to install; Windows additionally gets a .bat launcher.
scripts = ['hg']
if os.name == 'nt':
    scripts.append('contrib/win32/hg.bat')
88
88
# simplified version of distutils.ccompiler.CCompiler.has_function
# that actually removes its temporary files.
def hasfunction(cc, funcname):
    """Return True if compiler `cc` can compile and link a call to
    `funcname`, False otherwise.

    Temporary build files are always removed and the original stderr is
    always restored, even on failure.
    """
    tmpdir = tempfile.mkdtemp(prefix='hg-install-')
    devnull = oldstderr = None
    try:
        try:
            fname = os.path.join(tmpdir, 'funcname.c')
            f = open(fname, 'w')
            f.write('int main(void) {\n')
            f.write('    %s();\n' % funcname)
            f.write('}\n')
            f.close()
            # Redirect stderr to /dev/null to hide any error messages
            # from the compiler.
            # This will have to be changed if we ever have to check
            # for a function on Windows.
            devnull = open('/dev/null', 'w')
            oldstderr = os.dup(sys.stderr.fileno())
            os.dup2(devnull.fileno(), sys.stderr.fileno())
            objects = cc.compile([fname], output_dir=tmpdir)
            cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
        except Exception:
            # was a naked `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; any ordinary failure (compile or link
            # error) just means the function is unavailable
            return False
        return True
    finally:
        if oldstderr is not None:
            os.dup2(oldstderr, sys.stderr.fileno())
        if devnull is not None:
            devnull.close()
        shutil.rmtree(tmpdir)
120
120
# py2exe needs to be installed to work
try:
    import py2exe
    py2exeloaded = True
    # import py2exe's patched Distribution class
    from distutils.core import Distribution
except ImportError:
    # building a frozen Windows package is simply unavailable
    py2exeloaded = False
129
129
def runcmd(cmd, env):
    """Run `cmd` with environment `env`; return (stdout, stderr)."""
    if sys.platform == 'plan9':
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        _, out, err = os.popen3(cmd)
        return str(out), str(err)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, env=env)
    return proc.communicate()
141
141
def runhg(cmd, env):
    """Run an hg command and return its stdout, or '' when stderr shows
    anything other than ignorable warnings."""
    out, err = runcmd(cmd, env)
    # If root is executing setup.py, but the repository is owned by
    # another user (as in "sudo python setup.py install") we will get
    # trust warnings since the .hg/hgrc file is untrusted. That is
    # fine, we don't want to load it anyway. Python may warn about
    # a missing __init__.py in mercurial/locale, we also ignore that.
    ignorable = (b('Not trusting file'), b('warning: Not importing'))
    serious = [line for line in err.splitlines()
               if not line.startswith(ignorable)]
    if serious:
        return ''
    return out
155
155
version = ''

# Execute hg out of this directory with a custom environment which
# includes the pure Python modules in mercurial/pure. We also take
# care to not use any hgrc files and do no localization.
pypath = ['mercurial', os.path.join('mercurial', 'pure')]
env = {'PYTHONPATH': os.pathsep.join(pypath),
       'HGRCPATH': '',
       'LANGUAGE': 'C'}
if 'LD_LIBRARY_PATH' in os.environ:
    env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
if 'SystemRoot' in os.environ:
    # Copy SystemRoot into the custom environment for Python 2.6
    # under Windows. Otherwise, the subprocess will fail with
    # error 0xc0150004. See: http://bugs.python.org/issue3440
    env['SystemRoot'] = os.environ['SystemRoot']

if os.path.isdir('.hg'):
    # working copy of the repo: derive the version from `hg id`/tags
    cmd = [sys.executable, 'hg', 'id', '-i', '-t']
    l = runhg(cmd, env).split()
    while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
        l.pop()
    if len(l) > 1: # tag found
        version = l[-1]
        if l[0].endswith('+'): # propagate the dirty status to the tag
            version += '+'
    elif len(l) == 1: # no tag found
        cmd = [sys.executable, 'hg', 'parents', '--template',
               '{latesttag}+{latesttagdistance}-']
        version = runhg(cmd, env) + l[0]
    if version.endswith('+'):
        # dirty working copy: stamp with today's date
        version += time.strftime('%Y%m%d')
elif os.path.exists('.hg_archival.txt'):
    # unpacked `hg archive` tarball: read the metadata file instead
    kw = dict([[t.strip() for t in l.split(':', 1)]
               for l in open('.hg_archival.txt')])
    if 'tag' in kw:
        version = kw['tag']
    elif 'latesttag' in kw:
        version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
    else:
        version = kw.get('node', '')[:12]

if version:
    # persist for runtime use by the installed package
    f = open("mercurial/__version__.py", "w")
    f.write('# this file is autogenerated by setup.py\n')
    f.write('version = "%s"\n' % version)
    f.close()


try:
    from mercurial import __version__
    version = __version__.version
except ImportError:
    version = 'unknown'
210
210
class hgbuild(build):
    """Custom build command reordering sub-commands so that build_mo and
    build_ext run before build_py."""
    # Insert hgbuildmo first so that files in mercurial/locale/ are found
    # when build_py is run next.
    sub_commands = [('build_mo', None),

    # We also need build_ext before build_py. Otherwise, when 2to3 is
    # called (in build_py), it will not find osutil & friends,
    # thinking that those modules are global and, consequently, making
    # a mess, now that all module imports are global.

                    ('build_ext', build.has_ext_modules),
                   ] + build.sub_commands
223
223
class hgbuildmo(build):
    """Compile i18n/*.po translation sources into locale/*/hg.mo files."""

    description = "build translations (.mo files)"

    def run(self):
        # msgfmt is optional: warn and skip rather than fail the build
        if not find_executable('msgfmt'):
            self.warn("could not find msgfmt executable, no translations "
                      "will be built")
            return

        podir = 'i18n'
        if not os.path.isdir(podir):
            self.warn("could not find %s/ directory" % podir)
            return

        join = os.path.join
        for po in os.listdir(podir):
            if not po.endswith('.po'):
                continue
            pofile = join(podir, po)
            # e.g. i18n/da.po -> mercurial/locale/da/LC_MESSAGES/hg.mo
            modir = join('locale', po[:-3], 'LC_MESSAGES')
            mofile = join(modir, 'hg.mo')
            mobuildfile = join('mercurial', mofile)
            cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
            if sys.platform != 'sunos5':
                # msgfmt on Solaris does not know about -c
                cmd.append('-c')
            self.mkpath(join('mercurial', modir))
            # make_file only reruns msgfmt when the .po is newer
            self.make_file([pofile], mobuildfile, spawn, (cmd,))
253
253
254
254
class hgdist(Distribution):
    """Distribution subclass adding the --pure and --c2to3 global options."""
    # default: build C extensions unless --pure is passed
    pure = 0

    global_options = Distribution.global_options + \
        [('pure', None, "use pure (slow) Python "
          "code instead of C extensions"),
         ('c2to3', None, "(experimental!) convert "
          "code with 2to3"),
        ]

    def has_ext_modules(self):
        # self.ext_modules is emptied in hgbuildpy.finalize_options which is
        # too late for some cases
        return not self.pure and Distribution.has_ext_modules(self)
269
269
class hgbuildext(build_ext):
    """build_ext variant that tolerates failures of optional extensions."""

    def build_extension(self, ext):
        try:
            build_ext.build_extension(self, ext)
        except CCompilerError:
            # extensions marked optional may fail to build; warn and go on
            if not getattr(ext, 'optional', False):
                raise
            log.warn("Failed to build optional extension '%s' (skipping)",
                     ext.name)
280
280
class hgbuildpy(build_py):
    """build_py variant handling --pure builds and optional 2to3 fixers."""
    if convert2to3:
        # apply both stock lib2to3 fixers and Mercurial's own hgfixes
        fixer_names = sorted(set(getfixers("lib2to3.fixes") +
                                 getfixers("hgfixes")))

    def finalize_options(self):
        build_py.finalize_options(self)

        if self.distribution.pure:
            # substitute the pure-Python modules for every C extension
            if self.py_modules is None:
                self.py_modules = []
            for ext in self.distribution.ext_modules:
                if ext.name.startswith("mercurial."):
                    self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
            self.distribution.ext_modules = []
        else:
            if not os.path.exists(os.path.join(get_python_inc(), 'Python.h')):
                raise SystemExit('Python headers are required to build '
                                 'Mercurial')

    def find_modules(self):
        modules = build_py.find_modules(self)
        for module in modules:
            # relocate mercurial.pure.* so they install as mercurial.*
            if module[0] == "mercurial.pure":
                if module[1] != "__init__":
                    yield ("mercurial", module[1], module[2])
            else:
                yield module
309
309
class buildhgextindex(Command):
    """Write hgext/__index__.py listing available extensions, so frozen
    packages can enumerate them without scanning the filesystem."""
    description = 'generate prebuilt index of hgext (for frozen package)'
    user_options = []
    _indexfilename = 'hgext/__index__.py'

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # remove a stale index so it cannot shadow the fresh listing
        if os.path.exists(self._indexfilename):
            os.unlink(self._indexfilename)

        # here no extension enabled, disabled() lists up everything
        code = ('import pprint; from mercurial import extensions; '
                'pprint.pprint(extensions.disabled())')
        out, err = runcmd([sys.executable, '-c', code], env)
        if err:
            raise DistutilsExecError(err)

        f = open(self._indexfilename, 'w')
        f.write('# this file is autogenerated by setup.py\n')
        f.write('docs = ')
        f.write(out)
        f.close()
337
337
class hginstallscripts(install_scripts):
    '''
    This is a specialization of install_scripts that replaces the @LIBDIR@ with
    the configured directory for modules. If possible, the path is made relative
    to the directory for scripts.
    '''

    def initialize_options(self):
        install_scripts.initialize_options(self)

        # filled in from the parent 'install' command in finalize_options
        self.install_lib = None

    def finalize_options(self):
        install_scripts.finalize_options(self)
        self.set_undefined_options('install',
                                   ('install_lib', 'install_lib'))

    def run(self):
        install_scripts.run(self)

        if (os.path.splitdrive(self.install_dir)[0] !=
            os.path.splitdrive(self.install_lib)[0]):
            # can't make relative paths from one drive to another, so use an
            # absolute path instead
            libdir = self.install_lib
        else:
            # express install_lib relative to install_dir via '..' hops
            common = os.path.commonprefix((self.install_dir, self.install_lib))
            rest = self.install_dir[len(common):]
            uplevel = len([n for n in os.path.split(rest) if n])

            libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):]

        for outfile in self.outfiles:
            fp = open(outfile, 'rb')
            data = fp.read()
            fp.close()

            # skip binary files
            if b('\0') in data:
                continue

            data = data.replace('@LIBDIR@', libdir.encode('string_escape'))
            fp = open(outfile, 'wb')
            fp.write(data)
            fp.close()
383
383
384 cmdclass = {'build': hgbuild,
384 cmdclass = {'build': hgbuild,
385 'build_mo': hgbuildmo,
385 'build_mo': hgbuildmo,
386 'build_ext': hgbuildext,
386 'build_ext': hgbuildext,
387 'build_py': hgbuildpy,
387 'build_py': hgbuildpy,
388 'build_hgextindex': buildhgextindex,
388 'build_hgextindex': buildhgextindex,
389 'install_scripts': hginstallscripts}
389 'install_scripts': hginstallscripts}
390
390
391 packages = ['mercurial', 'mercurial.hgweb',
391 packages = ['mercurial', 'mercurial.hgweb',
392 'mercurial.httpclient', 'mercurial.httpclient.tests',
392 'mercurial.httpclient', 'mercurial.httpclient.tests',
393 'hgext', 'hgext.convert', 'hgext.highlight', 'hgext.zeroconf',
393 'hgext', 'hgext.convert', 'hgext.highlight', 'hgext.zeroconf',
394 'hgext.largefiles']
394 'hgext.largefiles']
395
395
396 pymodules = []
396 pymodules = []
397
397
398 extmodules = [
398 extmodules = [
399 Extension('mercurial.base85', ['mercurial/base85.c']),
399 Extension('mercurial.base85', ['mercurial/base85.c']),
400 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
400 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
401 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
401 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
402 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
402 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
403 Extension('mercurial.parsers', ['mercurial/parsers.c']),
403 Extension('mercurial.parsers', ['mercurial/parsers.c']),
404 ]
404 ]
405
405
406 osutil_ldflags = []
406 osutil_ldflags = []
407
407
408 if sys.platform == 'darwin':
408 if sys.platform == 'darwin':
409 osutil_ldflags += ['-framework', 'ApplicationServices']
409 osutil_ldflags += ['-framework', 'ApplicationServices']
410
410
411 # disable osutil.c under windows + python 2.4 (issue1364)
411 # disable osutil.c under windows + python 2.4 (issue1364)
412 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
412 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
413 pymodules.append('mercurial.pure.osutil')
413 pymodules.append('mercurial.pure.osutil')
414 else:
414 else:
415 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c'],
415 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c'],
416 extra_link_args=osutil_ldflags))
416 extra_link_args=osutil_ldflags))
417
417
418 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
418 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
419 # The inotify extension is only usable with Linux 2.6 kernels.
419 # The inotify extension is only usable with Linux 2.6 kernels.
420 # You also need a reasonably recent C library.
420 # You also need a reasonably recent C library.
421 # In any case, if it fails to build the error will be skipped ('optional').
421 # In any case, if it fails to build the error will be skipped ('optional').
422 cc = new_compiler()
422 cc = new_compiler()
423 if hasfunction(cc, 'inotify_add_watch'):
423 if hasfunction(cc, 'inotify_add_watch'):
424 inotify = Extension('hgext.inotify.linux._inotify',
424 inotify = Extension('hgext.inotify.linux._inotify',
425 ['hgext/inotify/linux/_inotify.c'],
425 ['hgext/inotify/linux/_inotify.c'],
426 ['mercurial'])
426 ['mercurial'])
427 inotify.optional = True
427 inotify.optional = True
428 extmodules.append(inotify)
428 extmodules.append(inotify)
429 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
429 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
430
430
431 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
431 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
432 'help/*.txt']}
432 'help/*.txt']}
433
433
434 def ordinarypath(p):
434 def ordinarypath(p):
435 return p and p[0] != '.' and p[-1] != '~'
435 return p and p[0] != '.' and p[-1] != '~'
436
436
437 for root in ('templates',):
437 for root in ('templates',):
438 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
438 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
439 curdir = curdir.split(os.sep, 1)[1]
439 curdir = curdir.split(os.sep, 1)[1]
440 dirs[:] = filter(ordinarypath, dirs)
440 dirs[:] = filter(ordinarypath, dirs)
441 for f in filter(ordinarypath, files):
441 for f in filter(ordinarypath, files):
442 f = os.path.join(curdir, f)
442 f = os.path.join(curdir, f)
443 packagedata['mercurial'].append(f)
443 packagedata['mercurial'].append(f)
444
444
445 datafiles = []
445 datafiles = []
446 setupversion = version
446 setupversion = version
447 extra = {}
447 extra = {}
448
448
449 if py2exeloaded:
449 if py2exeloaded:
450 extra['console'] = [
450 extra['console'] = [
451 {'script':'hg',
451 {'script':'hg',
452 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
452 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
453 'product_version':version}]
453 'product_version':version}]
454 # sub command of 'build' because 'py2exe' does not handle sub_commands
454 # sub command of 'build' because 'py2exe' does not handle sub_commands
455 build.sub_commands.insert(0, ('build_hgextindex', None))
455 build.sub_commands.insert(0, ('build_hgextindex', None))
456
456
457 if os.name == 'nt':
457 if os.name == 'nt':
458 # Windows binary file versions for exe/dll files must have the
458 # Windows binary file versions for exe/dll files must have the
459 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
459 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
460 setupversion = version.split('+', 1)[0]
460 setupversion = version.split('+', 1)[0]
461
461
462 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
462 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
463 # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
463 # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
464 # distutils.sysconfig
464 # distutils.sysconfig
465 version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[0].splitlines()
465 version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[0].splitlines()
466 if version:
466 if version:
467 version = version[0]
467 version = version[0]
468 xcode4 = (version.startswith('Xcode') and
468 xcode4 = (version.startswith('Xcode') and
469 StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
469 StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
470 else:
470 else:
471 # xcodebuild returns empty on OS X Lion with XCode 4.3 not
471 # xcodebuild returns empty on OS X Lion with XCode 4.3 not
472 # installed, but instead with only command-line tools. Assume
472 # installed, but instead with only command-line tools. Assume
473 # that only happens on >= Lion, thus no PPC support.
473 # that only happens on >= Lion, thus no PPC support.
474 xcode4 = True
474 xcode4 = True
475
475
476 if xcode4:
476 if xcode4:
477 os.environ['ARCHFLAGS'] = ''
477 os.environ['ARCHFLAGS'] = ''
478
478
479 setup(name='mercurial',
479 setup(name='mercurial',
480 version=setupversion,
480 version=setupversion,
481 author='Matt Mackall',
481 author='Matt Mackall',
482 author_email='mpm@selenic.com',
482 author_email='mpm@selenic.com',
483 url='http://mercurial.selenic.com/',
483 url='http://mercurial.selenic.com/',
484 description='Scalable distributed SCM',
484 description='Scalable distributed SCM',
485 license='GNU GPLv2+',
485 license='GNU GPLv2+',
486 scripts=scripts,
486 scripts=scripts,
487 packages=packages,
487 packages=packages,
488 py_modules=pymodules,
488 py_modules=pymodules,
489 ext_modules=extmodules,
489 ext_modules=extmodules,
490 data_files=datafiles,
490 data_files=datafiles,
491 package_data=packagedata,
491 package_data=packagedata,
492 cmdclass=cmdclass,
492 cmdclass=cmdclass,
493 distclass=hgdist,
493 distclass=hgdist,
494 options=dict(py2exe=dict(packages=['hgext', 'email']),
494 options=dict(py2exe=dict(packages=['hgext', 'email']),
495 bdist_mpkg=dict(zipdist=True,
495 bdist_mpkg=dict(zipdist=True,
496 license='COPYING',
496 license='COPYING',
497 readme='contrib/macosx/Readme.html',
497 readme='contrib/macosx/Readme.html',
498 welcome='contrib/macosx/Welcome.html')),
498 welcome='contrib/macosx/Welcome.html')),
499 **extra)
499 **extra)
@@ -1,345 +1,345 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """Test the running system for features availability. Exit with zero
2 """Test the running system for features availability. Exit with zero
3 if all features are there, non-zero otherwise. If a feature name is
3 if all features are there, non-zero otherwise. If a feature name is
4 prefixed with "no-", the absence of feature is tested.
4 prefixed with "no-", the absence of feature is tested.
5 """
5 """
6 import optparse
6 import optparse
7 import os, stat
7 import os, stat
8 import re
8 import re
9 import sys
9 import sys
10 import tempfile
10 import tempfile
11
11
12 tempprefix = 'hg-hghave-'
12 tempprefix = 'hg-hghave-'
13
13
14 def matchoutput(cmd, regexp, ignorestatus=False):
14 def matchoutput(cmd, regexp, ignorestatus=False):
15 """Return True if cmd executes successfully and its output
15 """Return True if cmd executes successfully and its output
16 is matched by the supplied regular expression.
16 is matched by the supplied regular expression.
17 """
17 """
18 r = re.compile(regexp)
18 r = re.compile(regexp)
19 fh = os.popen(cmd)
19 fh = os.popen(cmd)
20 s = fh.read()
20 s = fh.read()
21 try:
21 try:
22 ret = fh.close()
22 ret = fh.close()
23 except IOError:
23 except IOError:
24 # Happen in Windows test environment
24 # Happen in Windows test environment
25 ret = 1
25 ret = 1
26 return (ignorestatus or ret is None) and r.search(s)
26 return (ignorestatus or ret is None) and r.search(s)
27
27
28 def has_baz():
28 def has_baz():
29 return matchoutput('baz --version 2>&1', r'baz Bazaar version')
29 return matchoutput('baz --version 2>&1', r'baz Bazaar version')
30
30
31 def has_bzr():
31 def has_bzr():
32 try:
32 try:
33 import bzrlib
33 import bzrlib
34 return bzrlib.__doc__ is not None
34 return bzrlib.__doc__ is not None
35 except ImportError:
35 except ImportError:
36 return False
36 return False
37
37
38 def has_bzr114():
38 def has_bzr114():
39 try:
39 try:
40 import bzrlib
40 import bzrlib
41 return (bzrlib.__doc__ is not None
41 return (bzrlib.__doc__ is not None
42 and bzrlib.version_info[:2] >= (1, 14))
42 and bzrlib.version_info[:2] >= (1, 14))
43 except ImportError:
43 except ImportError:
44 return False
44 return False
45
45
46 def has_cvs():
46 def has_cvs():
47 re = r'Concurrent Versions System.*?server'
47 re = r'Concurrent Versions System.*?server'
48 return matchoutput('cvs --version 2>&1', re) and not has_msys()
48 return matchoutput('cvs --version 2>&1', re) and not has_msys()
49
49
50 def has_darcs():
50 def has_darcs():
51 return matchoutput('darcs --version', r'2\.[2-9]', True)
51 return matchoutput('darcs --version', r'2\.[2-9]', True)
52
52
53 def has_mtn():
53 def has_mtn():
54 return matchoutput('mtn --version', r'monotone', True) and not matchoutput(
54 return matchoutput('mtn --version', r'monotone', True) and not matchoutput(
55 'mtn --version', r'monotone 0\.', True)
55 'mtn --version', r'monotone 0\.', True)
56
56
57 def has_eol_in_paths():
57 def has_eol_in_paths():
58 try:
58 try:
59 fd, path = tempfile.mkstemp(prefix=tempprefix, suffix='\n\r')
59 fd, path = tempfile.mkstemp(prefix=tempprefix, suffix='\n\r')
60 os.close(fd)
60 os.close(fd)
61 os.remove(path)
61 os.remove(path)
62 return True
62 return True
63 except:
63 except (IOError, OSError):
64 return False
64 return False
65
65
66 def has_executablebit():
66 def has_executablebit():
67 try:
67 try:
68 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
68 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
69 fh, fn = tempfile.mkstemp(dir=".", prefix='hg-checkexec-')
69 fh, fn = tempfile.mkstemp(dir=".", prefix='hg-checkexec-')
70 try:
70 try:
71 os.close(fh)
71 os.close(fh)
72 m = os.stat(fn).st_mode & 0777
72 m = os.stat(fn).st_mode & 0777
73 new_file_has_exec = m & EXECFLAGS
73 new_file_has_exec = m & EXECFLAGS
74 os.chmod(fn, m ^ EXECFLAGS)
74 os.chmod(fn, m ^ EXECFLAGS)
75 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
75 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
76 finally:
76 finally:
77 os.unlink(fn)
77 os.unlink(fn)
78 except (IOError, OSError):
78 except (IOError, OSError):
79 # we don't care, the user probably won't be able to commit anyway
79 # we don't care, the user probably won't be able to commit anyway
80 return False
80 return False
81 return not (new_file_has_exec or exec_flags_cannot_flip)
81 return not (new_file_has_exec or exec_flags_cannot_flip)
82
82
83 def has_icasefs():
83 def has_icasefs():
84 # Stolen from mercurial.util
84 # Stolen from mercurial.util
85 fd, path = tempfile.mkstemp(prefix=tempprefix, dir='.')
85 fd, path = tempfile.mkstemp(prefix=tempprefix, dir='.')
86 os.close(fd)
86 os.close(fd)
87 try:
87 try:
88 s1 = os.stat(path)
88 s1 = os.stat(path)
89 d, b = os.path.split(path)
89 d, b = os.path.split(path)
90 p2 = os.path.join(d, b.upper())
90 p2 = os.path.join(d, b.upper())
91 if path == p2:
91 if path == p2:
92 p2 = os.path.join(d, b.lower())
92 p2 = os.path.join(d, b.lower())
93 try:
93 try:
94 s2 = os.stat(p2)
94 s2 = os.stat(p2)
95 return s2 == s1
95 return s2 == s1
96 except:
96 except OSError:
97 return False
97 return False
98 finally:
98 finally:
99 os.remove(path)
99 os.remove(path)
100
100
101 def has_inotify():
101 def has_inotify():
102 try:
102 try:
103 import hgext.inotify.linux.watcher
103 import hgext.inotify.linux.watcher
104 return True
104 return True
105 except ImportError:
105 except ImportError:
106 return False
106 return False
107
107
108 def has_fifo():
108 def has_fifo():
109 return getattr(os, "mkfifo", None) is not None
109 return getattr(os, "mkfifo", None) is not None
110
110
111 def has_cacheable_fs():
111 def has_cacheable_fs():
112 from mercurial import util
112 from mercurial import util
113
113
114 fd, path = tempfile.mkstemp(prefix=tempprefix)
114 fd, path = tempfile.mkstemp(prefix=tempprefix)
115 os.close(fd)
115 os.close(fd)
116 try:
116 try:
117 return util.cachestat(path).cacheable()
117 return util.cachestat(path).cacheable()
118 finally:
118 finally:
119 os.remove(path)
119 os.remove(path)
120
120
121 def has_lsprof():
121 def has_lsprof():
122 try:
122 try:
123 import _lsprof
123 import _lsprof
124 return True
124 return True
125 except ImportError:
125 except ImportError:
126 return False
126 return False
127
127
128 def has_gettext():
128 def has_gettext():
129 return matchoutput('msgfmt --version', 'GNU gettext-tools')
129 return matchoutput('msgfmt --version', 'GNU gettext-tools')
130
130
131 def has_git():
131 def has_git():
132 return matchoutput('git --version 2>&1', r'^git version')
132 return matchoutput('git --version 2>&1', r'^git version')
133
133
134 def has_docutils():
134 def has_docutils():
135 try:
135 try:
136 from docutils.core import publish_cmdline
136 from docutils.core import publish_cmdline
137 return True
137 return True
138 except ImportError:
138 except ImportError:
139 return False
139 return False
140
140
141 def getsvnversion():
141 def getsvnversion():
142 m = matchoutput('svn --version 2>&1', r'^svn,\s+version\s+(\d+)\.(\d+)')
142 m = matchoutput('svn --version 2>&1', r'^svn,\s+version\s+(\d+)\.(\d+)')
143 if not m:
143 if not m:
144 return (0, 0)
144 return (0, 0)
145 return (int(m.group(1)), int(m.group(2)))
145 return (int(m.group(1)), int(m.group(2)))
146
146
147 def has_svn15():
147 def has_svn15():
148 return getsvnversion() >= (1, 5)
148 return getsvnversion() >= (1, 5)
149
149
150 def has_svn13():
150 def has_svn13():
151 return getsvnversion() >= (1, 3)
151 return getsvnversion() >= (1, 3)
152
152
153 def has_svn():
153 def has_svn():
154 return matchoutput('svn --version 2>&1', r'^svn, version') and \
154 return matchoutput('svn --version 2>&1', r'^svn, version') and \
155 matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
155 matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
156
156
157 def has_svn_bindings():
157 def has_svn_bindings():
158 try:
158 try:
159 import svn.core
159 import svn.core
160 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
160 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
161 if version < (1, 4):
161 if version < (1, 4):
162 return False
162 return False
163 return True
163 return True
164 except ImportError:
164 except ImportError:
165 return False
165 return False
166
166
167 def has_p4():
167 def has_p4():
168 return (matchoutput('p4 -V', r'Rev\. P4/') and
168 return (matchoutput('p4 -V', r'Rev\. P4/') and
169 matchoutput('p4d -V', r'Rev\. P4D/'))
169 matchoutput('p4d -V', r'Rev\. P4D/'))
170
170
171 def has_symlink():
171 def has_symlink():
172 if getattr(os, "symlink", None) is None:
172 if getattr(os, "symlink", None) is None:
173 return False
173 return False
174 name = tempfile.mktemp(dir=".", prefix='hg-checklink-')
174 name = tempfile.mktemp(dir=".", prefix='hg-checklink-')
175 try:
175 try:
176 os.symlink(".", name)
176 os.symlink(".", name)
177 os.unlink(name)
177 os.unlink(name)
178 return True
178 return True
179 except (OSError, AttributeError):
179 except (OSError, AttributeError):
180 return False
180 return False
181
181
182 def has_tla():
182 def has_tla():
183 return matchoutput('tla --version 2>&1', r'The GNU Arch Revision')
183 return matchoutput('tla --version 2>&1', r'The GNU Arch Revision')
184
184
185 def has_gpg():
185 def has_gpg():
186 return matchoutput('gpg --version 2>&1', r'GnuPG')
186 return matchoutput('gpg --version 2>&1', r'GnuPG')
187
187
188 def has_unix_permissions():
188 def has_unix_permissions():
189 d = tempfile.mkdtemp(prefix=tempprefix, dir=".")
189 d = tempfile.mkdtemp(prefix=tempprefix, dir=".")
190 try:
190 try:
191 fname = os.path.join(d, 'foo')
191 fname = os.path.join(d, 'foo')
192 for umask in (077, 007, 022):
192 for umask in (077, 007, 022):
193 os.umask(umask)
193 os.umask(umask)
194 f = open(fname, 'w')
194 f = open(fname, 'w')
195 f.close()
195 f.close()
196 mode = os.stat(fname).st_mode
196 mode = os.stat(fname).st_mode
197 os.unlink(fname)
197 os.unlink(fname)
198 if mode & 0777 != ~umask & 0666:
198 if mode & 0777 != ~umask & 0666:
199 return False
199 return False
200 return True
200 return True
201 finally:
201 finally:
202 os.rmdir(d)
202 os.rmdir(d)
203
203
204 def has_pyflakes():
204 def has_pyflakes():
205 return matchoutput('echo "import re" 2>&1 | pyflakes',
205 return matchoutput('echo "import re" 2>&1 | pyflakes',
206 r"<stdin>:1: 're' imported but unused",
206 r"<stdin>:1: 're' imported but unused",
207 True)
207 True)
208
208
209 def has_pygments():
209 def has_pygments():
210 try:
210 try:
211 import pygments
211 import pygments
212 return True
212 return True
213 except ImportError:
213 except ImportError:
214 return False
214 return False
215
215
216 def has_outer_repo():
216 def has_outer_repo():
217 return matchoutput('hg root 2>&1', r'')
217 return matchoutput('hg root 2>&1', r'')
218
218
219 def has_ssl():
219 def has_ssl():
220 try:
220 try:
221 import ssl
221 import ssl
222 import OpenSSL
222 import OpenSSL
223 OpenSSL.SSL.Context
223 OpenSSL.SSL.Context
224 return True
224 return True
225 except ImportError:
225 except ImportError:
226 return False
226 return False
227
227
228 def has_windows():
228 def has_windows():
229 return os.name == 'nt'
229 return os.name == 'nt'
230
230
231 def has_system_sh():
231 def has_system_sh():
232 return os.name != 'nt'
232 return os.name != 'nt'
233
233
234 def has_serve():
234 def has_serve():
235 return os.name != 'nt' # gross approximation
235 return os.name != 'nt' # gross approximation
236
236
237 def has_tic():
237 def has_tic():
238 return matchoutput('test -x "`which tic`"', '')
238 return matchoutput('test -x "`which tic`"', '')
239
239
240 def has_msys():
240 def has_msys():
241 return os.getenv('MSYSTEM')
241 return os.getenv('MSYSTEM')
242
242
243 checks = {
243 checks = {
244 "baz": (has_baz, "GNU Arch baz client"),
244 "baz": (has_baz, "GNU Arch baz client"),
245 "bzr": (has_bzr, "Canonical's Bazaar client"),
245 "bzr": (has_bzr, "Canonical's Bazaar client"),
246 "bzr114": (has_bzr114, "Canonical's Bazaar client >= 1.14"),
246 "bzr114": (has_bzr114, "Canonical's Bazaar client >= 1.14"),
247 "cacheable": (has_cacheable_fs, "cacheable filesystem"),
247 "cacheable": (has_cacheable_fs, "cacheable filesystem"),
248 "cvs": (has_cvs, "cvs client/server"),
248 "cvs": (has_cvs, "cvs client/server"),
249 "darcs": (has_darcs, "darcs client"),
249 "darcs": (has_darcs, "darcs client"),
250 "docutils": (has_docutils, "Docutils text processing library"),
250 "docutils": (has_docutils, "Docutils text processing library"),
251 "eol-in-paths": (has_eol_in_paths, "end-of-lines in paths"),
251 "eol-in-paths": (has_eol_in_paths, "end-of-lines in paths"),
252 "execbit": (has_executablebit, "executable bit"),
252 "execbit": (has_executablebit, "executable bit"),
253 "fifo": (has_fifo, "named pipes"),
253 "fifo": (has_fifo, "named pipes"),
254 "gettext": (has_gettext, "GNU Gettext (msgfmt)"),
254 "gettext": (has_gettext, "GNU Gettext (msgfmt)"),
255 "git": (has_git, "git command line client"),
255 "git": (has_git, "git command line client"),
256 "gpg": (has_gpg, "gpg client"),
256 "gpg": (has_gpg, "gpg client"),
257 "icasefs": (has_icasefs, "case insensitive file system"),
257 "icasefs": (has_icasefs, "case insensitive file system"),
258 "inotify": (has_inotify, "inotify extension support"),
258 "inotify": (has_inotify, "inotify extension support"),
259 "lsprof": (has_lsprof, "python lsprof module"),
259 "lsprof": (has_lsprof, "python lsprof module"),
260 "mtn": (has_mtn, "monotone client (>= 1.0)"),
260 "mtn": (has_mtn, "monotone client (>= 1.0)"),
261 "outer-repo": (has_outer_repo, "outer repo"),
261 "outer-repo": (has_outer_repo, "outer repo"),
262 "p4": (has_p4, "Perforce server and client"),
262 "p4": (has_p4, "Perforce server and client"),
263 "pyflakes": (has_pyflakes, "Pyflakes python linter"),
263 "pyflakes": (has_pyflakes, "Pyflakes python linter"),
264 "pygments": (has_pygments, "Pygments source highlighting library"),
264 "pygments": (has_pygments, "Pygments source highlighting library"),
265 "serve": (has_serve, "platform and python can manage 'hg serve -d'"),
265 "serve": (has_serve, "platform and python can manage 'hg serve -d'"),
266 "ssl": (has_ssl, "python >= 2.6 ssl module and python OpenSSL"),
266 "ssl": (has_ssl, "python >= 2.6 ssl module and python OpenSSL"),
267 "svn": (has_svn, "subversion client and admin tools"),
267 "svn": (has_svn, "subversion client and admin tools"),
268 "svn13": (has_svn13, "subversion client and admin tools >= 1.3"),
268 "svn13": (has_svn13, "subversion client and admin tools >= 1.3"),
269 "svn15": (has_svn15, "subversion client and admin tools >= 1.5"),
269 "svn15": (has_svn15, "subversion client and admin tools >= 1.5"),
270 "svn-bindings": (has_svn_bindings, "subversion python bindings"),
270 "svn-bindings": (has_svn_bindings, "subversion python bindings"),
271 "symlink": (has_symlink, "symbolic links"),
271 "symlink": (has_symlink, "symbolic links"),
272 "system-sh": (has_system_sh, "system() uses sh"),
272 "system-sh": (has_system_sh, "system() uses sh"),
273 "tic": (has_tic, "terminfo compiler"),
273 "tic": (has_tic, "terminfo compiler"),
274 "tla": (has_tla, "GNU Arch tla client"),
274 "tla": (has_tla, "GNU Arch tla client"),
275 "unix-permissions": (has_unix_permissions, "unix-style permissions"),
275 "unix-permissions": (has_unix_permissions, "unix-style permissions"),
276 "windows": (has_windows, "Windows"),
276 "windows": (has_windows, "Windows"),
277 "msys": (has_msys, "Windows with MSYS"),
277 "msys": (has_msys, "Windows with MSYS"),
278 }
278 }
279
279
280 def list_features():
280 def list_features():
281 for name, feature in checks.iteritems():
281 for name, feature in checks.iteritems():
282 desc = feature[1]
282 desc = feature[1]
283 print name + ':', desc
283 print name + ':', desc
284
284
285 def test_features():
285 def test_features():
286 failed = 0
286 failed = 0
287 for name, feature in checks.iteritems():
287 for name, feature in checks.iteritems():
288 check, _ = feature
288 check, _ = feature
289 try:
289 try:
290 check()
290 check()
291 except Exception, e:
291 except Exception, e:
292 print "feature %s failed: %s" % (name, e)
292 print "feature %s failed: %s" % (name, e)
293 failed += 1
293 failed += 1
294 return failed
294 return failed
295
295
296 parser = optparse.OptionParser("%prog [options] [features]")
296 parser = optparse.OptionParser("%prog [options] [features]")
297 parser.add_option("--test-features", action="store_true",
297 parser.add_option("--test-features", action="store_true",
298 help="test available features")
298 help="test available features")
299 parser.add_option("--list-features", action="store_true",
299 parser.add_option("--list-features", action="store_true",
300 help="list available features")
300 help="list available features")
301 parser.add_option("-q", "--quiet", action="store_true",
301 parser.add_option("-q", "--quiet", action="store_true",
302 help="check features silently")
302 help="check features silently")
303
303
304 if __name__ == '__main__':
304 if __name__ == '__main__':
305 options, args = parser.parse_args()
305 options, args = parser.parse_args()
306 if options.list_features:
306 if options.list_features:
307 list_features()
307 list_features()
308 sys.exit(0)
308 sys.exit(0)
309
309
310 if options.test_features:
310 if options.test_features:
311 sys.exit(test_features())
311 sys.exit(test_features())
312
312
313 quiet = options.quiet
313 quiet = options.quiet
314
314
315 failures = 0
315 failures = 0
316
316
317 def error(msg):
317 def error(msg):
318 global failures
318 global failures
319 if not quiet:
319 if not quiet:
320 sys.stderr.write(msg + '\n')
320 sys.stderr.write(msg + '\n')
321 failures += 1
321 failures += 1
322
322
323 for feature in args:
323 for feature in args:
324 negate = feature.startswith('no-')
324 negate = feature.startswith('no-')
325 if negate:
325 if negate:
326 feature = feature[3:]
326 feature = feature[3:]
327
327
328 if feature not in checks:
328 if feature not in checks:
329 error('skipped: unknown feature: ' + feature)
329 error('skipped: unknown feature: ' + feature)
330 continue
330 continue
331
331
332 check, desc = checks[feature]
332 check, desc = checks[feature]
333 try:
333 try:
334 available = check()
334 available = check()
335 except Exception, e:
335 except Exception, e:
336 error('hghave check failed: ' + feature)
336 error('hghave check failed: ' + feature)
337 continue
337 continue
338
338
339 if not negate and not available:
339 if not negate and not available:
340 error('skipped: missing feature: ' + desc)
340 error('skipped: missing feature: ' + desc)
341 elif negate and available:
341 elif negate and available:
342 error('skipped: system supports %s' % desc)
342 error('skipped: system supports %s' % desc)
343
343
344 if failures != 0:
344 if failures != 0:
345 sys.exit(1)
345 sys.exit(1)
@@ -1,1283 +1,1283 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 #
38 #
39 # (You could use any subset of the tests: test-s* happens to match
39 # (You could use any subset of the tests: test-s* happens to match
40 # enough that it's worth doing parallel runs, few enough that it
40 # enough that it's worth doing parallel runs, few enough that it
41 # completes fairly quickly, includes both shell and Python scripts, and
41 # completes fairly quickly, includes both shell and Python scripts, and
42 # includes some scripts that run daemon processes.)
42 # includes some scripts that run daemon processes.)
43
43
44 from distutils import version
44 from distutils import version
45 import difflib
45 import difflib
46 import errno
46 import errno
47 import optparse
47 import optparse
48 import os
48 import os
49 import shutil
49 import shutil
50 import subprocess
50 import subprocess
51 import signal
51 import signal
52 import sys
52 import sys
53 import tempfile
53 import tempfile
54 import time
54 import time
55 import re
55 import re
56 import threading
56 import threading
57
57
processlock = threading.Lock()

closefds = os.name == 'posix'

def Popen4(cmd, wd, timeout):
    """Spawn cmd through the shell in directory wd.

    Returns the subprocess.Popen object decorated with popen2-style
    aliases (fromchild/tochild/childerr).  If timeout is non-zero, a
    watchdog thread terminates the process if it is still running after
    timeout seconds, setting p.timeout = True.
    """
    # Serialize process creation; concurrently spawned children could
    # otherwise inherit each other's pipe fds on platforms where
    # close_fds is off.
    processlock.acquire()
    try:
        p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd,
                             close_fds=closefds,
                             stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
    finally:
        # Release even if Popen raises (e.g. OSError); otherwise every
        # subsequent Popen4 call would deadlock on processlock.
        processlock.release()

    # popen2-compatible attribute names used by the rest of this script
    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:
        def t():
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(.1)
            p.timeout = True
            if p.returncode is None:
                terminate(p)
        threading.Thread(target=t).start()

    return p
85
85
# reserved exit code to skip test (used by hghave)
SKIPPED_STATUS = 80
# markers hghave writes to its output; parsehghaveoutput() scans for them
SKIPPED_PREFIX = 'skipped: '
FAILED_PREFIX = 'hghave check failed: '
# forward slashes so the path also works when interpolated into sh scripts
PYTHON = sys.executable.replace('\\', '/')
IMPL_PATH = 'PYTHONPATH'
if 'java' in sys.platform:
    # Jython resolves modules through JYTHONPATH instead
    IMPL_PATH = 'JYTHONPATH'

# external tools the shell-script tests rely on; checktools() warns if absent
requiredtools = ["python", "diff", "grep", "unzip", "gunzip", "bunzip2", "sed"]

# option name -> (environment variable override, fallback default);
# parseargs() resolves these before parsing the command line
defaults = {
    'jobs': ('HGTEST_JOBS', 1),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}
103
103
104 def parselistfiles(files, listtype, warn=True):
104 def parselistfiles(files, listtype, warn=True):
105 entries = dict()
105 entries = dict()
106 for filename in files:
106 for filename in files:
107 try:
107 try:
108 path = os.path.expanduser(os.path.expandvars(filename))
108 path = os.path.expanduser(os.path.expandvars(filename))
109 f = open(path, "r")
109 f = open(path, "r")
110 except IOError, err:
110 except IOError, err:
111 if err.errno != errno.ENOENT:
111 if err.errno != errno.ENOENT:
112 raise
112 raise
113 if warn:
113 if warn:
114 print "warning: no such %s file: %s" % (listtype, filename)
114 print "warning: no such %s file: %s" % (listtype, filename)
115 continue
115 continue
116
116
117 for line in f.readlines():
117 for line in f.readlines():
118 line = line.split('#', 1)[0].strip()
118 line = line.split('#', 1)[0].strip()
119 if line:
119 if line:
120 entries[line] = filename
120 entries[line] = filename
121
121
122 f.close()
122 f.close()
123 return entries
123 return entries
124
124
def parseargs():
    """Parse the command line, validate option combinations and return
    the (options, args) pair.

    Side effect: installs the module-level vlog() helper (verbose
    logging) based on --verbose.
    """
    parser = optparse.OptionParser("%prog [options] [tests]")

    # keep these sorted
    parser.add_option("--blacklist", action="append",
        help="skip tests listed in the specified blacklist file")
    parser.add_option("--whitelist", action="append",
        help="always run tests listed in the specified whitelist file")
    parser.add_option("-C", "--annotate", action="store_true",
        help="output files annotated with coverage")
    parser.add_option("--child", type="int",
        help="run as child process, summary to given fd")
    parser.add_option("-c", "--cover", action="store_true",
        help="print a test coverage report")
    parser.add_option("-d", "--debug", action="store_true",
        help="debug mode: write output of test scripts to console"
             " rather than capturing and diff'ing it (disables timeout)")
    parser.add_option("-f", "--first", action="store_true",
        help="exit on the first test failure")
    parser.add_option("-H", "--htmlcov", action="store_true",
        help="create an HTML report of the coverage of the files")
    parser.add_option("--inotify", action="store_true",
        help="enable inotify extension when running tests")
    parser.add_option("-i", "--interactive", action="store_true",
        help="prompt to accept changed output")
    parser.add_option("-j", "--jobs", type="int",
        help="number of jobs to run in parallel"
             " (default: $%s or %d)" % defaults['jobs'])
    parser.add_option("--keep-tmpdir", action="store_true",
        help="keep temporary directory after running tests")
    parser.add_option("-k", "--keywords",
        help="run tests matching keywords")
    parser.add_option("-l", "--local", action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg")
    parser.add_option("-n", "--nodiff", action="store_true",
        help="skip showing test changes")
    parser.add_option("-p", "--port", type="int",
        help="port on which servers should listen"
             " (default: $%s or %d)" % defaults['port'])
    parser.add_option("--pure", action="store_true",
        help="use pure Python code instead of C extensions")
    parser.add_option("-R", "--restart", action="store_true",
        help="restart at last error")
    parser.add_option("-r", "--retest", action="store_true",
        help="retest failed tests")
    parser.add_option("-S", "--noskips", action="store_true",
        help="don't report skip tests verbosely")
    parser.add_option("--shell", type="string",
        help="shell to use (default: $%s or %s)" % defaults['shell'])
    parser.add_option("-t", "--timeout", type="int",
        help="kill errant tests after TIMEOUT seconds"
             " (default: $%s or %d)" % defaults['timeout'])
    parser.add_option("--tmpdir", type="string",
        help="run tests in the given temporary directory"
             " (implies --keep-tmpdir)")
    parser.add_option("-v", "--verbose", action="store_true",
        help="output verbose messages")
    parser.add_option("--view", type="string",
        help="external diff viewer")
    parser.add_option("--with-hg", type="string",
        metavar="HG",
        help="test using specified hg script rather than a "
             "temporary installation")
    parser.add_option("-3", "--py3k-warnings", action="store_true",
        help="enable Py3k warnings on Python 2.6+")
    parser.add_option('--extra-config-opt', action="append",
        help='set the given config opt in the test hgrc')

    # resolve the defaults table: environment variables win over the
    # hard-coded fallback, coerced to the fallback's type
    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)
    (options, args) = parser.parse_args()

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    if options.with_hg:
        options.with_hg = os.path.expanduser(options.with_hg)
        if not (os.path.isfile(options.with_hg) and
                os.access(options.with_hg, os.X_OK)):
            parser.error('--with-hg must specify an executable hg script')
        if not os.path.basename(options.with_hg) == 'hg':
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
    if options.local:
        # --local is shorthand for --with-hg=<testdir>/../hg
        testdir = os.path.dirname(os.path.realpath(sys.argv[0]))
        hgbin = os.path.join(os.path.dirname(testdir), 'hg')
        if os.name != 'nt' and not os.access(hgbin, os.X_OK):
            parser.error('--local specified, but %r not found or not executable'
                         % hgbin)
        options.with_hg = hgbin

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage
            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error("sorry, coverage options do not work when --local "
                     "is specified")

    # install the verbose-logging helper used throughout this script
    global vlog
    if options.verbose:
        if options.jobs > 1 or options.child is not None:
            # prefix messages with the worker pid in parallel mode
            pid = "[%d]" % os.getpid()
        else:
            pid = None
        def vlog(*msg):
            # iolock keeps output from parallel workers from interleaving
            iolock.acquire()
            if pid:
                print pid,
            for m in msg:
                print m,
            print
            sys.stdout.flush()
            iolock.release()
    else:
        vlog = lambda *msg: None

    if options.tmpdir:
        options.tmpdir = os.path.expanduser(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.jobs > 1:
        print '(--interactive overrides --jobs)'
        options.jobs = 1
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        if options.timeout != defaults['timeout']:
            sys.stderr.write(
                'warning: --timeout option ignored with --debug\n')
        options.timeout = 0
    if options.py3k_warnings:
        if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
            parser.error('--py3k-warnings can only be used on Python 2.6+')
    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        # child processes already got the warning from the master
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist',
                                             warn=options.child is None)
    else:
        options.whitelisted = {}

    return (options, args)
277
277
def rename(src, dst):
    """Move src to dst, overwriting dst if it exists.

    Unlike os.rename() this is a copy-then-delete, which works even when
    dst exists or src is still open elsewhere, at the cost of atomicity.
    """
    shutil.copy(src, dst)
    os.remove(src)
284
284
def splitnewlines(text):
    '''like str.splitlines, but only split on newlines.
    keep line endings.'''
    # split() drops the '\n' separators, so re-append one to every piece
    # except the last; the last piece is kept only if it is non-empty
    # (i.e. the text did not end with a newline).
    pieces = text.split('\n')
    lines = [piece + '\n' for piece in pieces[:-1]]
    if pieces[-1]:
        lines.append(pieces[-1])
    return lines
299
299
def parsehghaveoutput(lines):
    '''Parse hghave log lines.
    Return tuple of lists (missing, failed):
    * the missing/unknown features
    * the features for which existence check failed'''
    missing = []
    failed = []
    for line in lines:
        # only the first physical line of each entry is meaningful
        if line.startswith(SKIPPED_PREFIX):
            missing.append(line.splitlines()[0][len(SKIPPED_PREFIX):])
        elif line.startswith(FAILED_PREFIX):
            failed.append(line.splitlines()[0][len(FAILED_PREFIX):])

    return missing, failed
316
316
def showdiff(expected, output, ref, err):
    # Emit a blank line followed by a unified diff of expected vs
    # actual output, labelled with the reference and error file names.
    print
    diff = difflib.unified_diff(expected, output, ref, err)
    sys.stdout.writelines(diff)
321
321
def findprogram(program):
    """Search PATH for a executable program"""
    for directory in os.environ.get('PATH', os.defpath).split(os.pathsep):
        candidate = os.path.join(directory, program)
        # on Windows the X_OK probe is unreliable, so accept any hit
        if os.name == 'nt' or os.access(candidate, os.X_OK):
            return candidate
    return None
329
329
330 def checktools():
330 def checktools():
331 # Before we go any further, check for pre-requisite tools
331 # Before we go any further, check for pre-requisite tools
332 # stuff from coreutils (cat, rm, etc) are not tested
332 # stuff from coreutils (cat, rm, etc) are not tested
333 for p in requiredtools:
333 for p in requiredtools:
334 if os.name == 'nt':
334 if os.name == 'nt':
335 p += '.exe'
335 p += '.exe'
336 found = findprogram(p)
336 found = findprogram(p)
337 if found:
337 if found:
338 vlog("# Found prerequisite", p, "at", found)
338 vlog("# Found prerequisite", p, "at", found)
339 else:
339 else:
340 print "WARNING: Did not find prerequisite tool: "+p
340 print "WARNING: Did not find prerequisite tool: "+p
341
341
def terminate(proc):
    """Terminate subprocess (with fallback for Python versions < 2.6)"""
    vlog('# Terminating process %d' % proc.pid)
    # Popen.terminate() appeared in 2.6; fall back to SIGTERM by hand
    kill = getattr(proc, 'terminate',
                   lambda: os.kill(proc.pid, signal.SIGTERM))
    try:
        kill()
    except OSError:
        # the process may already be gone; that's fine
        pass
349
349
def killdaemons():
    # Kill off any leftover daemon processes
    try:
        fp = open(DAEMON_PIDS)
        for line in fp:
            try:
                pid = int(line)
            except ValueError:
                # ignore malformed lines in the pid file
                continue
            try:
                # signal 0 only probes whether the process exists
                os.kill(pid, 0)
                vlog('# Killing daemon process %d' % pid)
                os.kill(pid, signal.SIGTERM)
                # give it a moment to exit gracefully before re-probing
                time.sleep(0.1)
                os.kill(pid, 0)
                vlog('# Daemon process %d is stuck - really killing it' % pid)
                os.kill(pid, signal.SIGKILL)
            except OSError, err:
                # ESRCH means the process is already gone - success
                if err.errno != errno.ESRCH:
                    raise
        fp.close()
        os.unlink(DAEMON_PIDS)
    except IOError:
        # no pid file: no daemons were started, nothing to clean up
        pass
374
374
def cleanup(options):
    """Remove the per-run temporary directory unless --keep-tmpdir."""
    if options.keep_tmpdir:
        return
    vlog("# Cleaning up HGTMP", HGTMP)
    # ignore_errors=True: best-effort removal, never fail the run
    shutil.rmtree(HGTMP, True)
379
379
def usecorrectpython():
    # some tests run python interpreter. they must use same
    # interpreter we use or bad things will happen.
    exedir, exename = os.path.split(sys.executable)
    if exename not in ('python', 'python.exe'):
        # oddly-named interpreter: always install a plain 'python'
        exename = 'python'
    else:
        # nothing to do if PATH already resolves to our interpreter
        found = findprogram(exename)
        if os.path.dirname(found) == exedir:
            return
    vlog('# Making python executable in test path use correct Python')
    mypython = os.path.join(BINDIR, exename)
    try:
        os.symlink(sys.executable, mypython)
    except AttributeError:
        # windows fallback
        shutil.copyfile(sys.executable, mypython)
        shutil.copymode(sys.executable, mypython)
398
398
def installhg(options):
    """Build and install a throwaway copy of Mercurial into HGTMP.

    Runs setup.py from the hg root, installs a dummy diffstat, patches
    the installed hg for --py3k-warnings, fixes hg.bat on Windows and
    wires up the coverage hooks when coverage options are active.
    """
    vlog("# Performing temporary installation of HG")
    installerrs = os.path.join("tests", "install.err")
    pure = options.pure and "--pure" or ""

    # Run installer in hg root
    script = os.path.realpath(sys.argv[0])
    hgroot = os.path.dirname(os.path.dirname(script))
    os.chdir(hgroot)
    nohome = '--home=""'
    if os.name == 'nt':
        # The --home="" trick works only on OS where os.sep == '/'
        # because of a distutils convert_path() fast-path. Avoid it at
        # least on Windows for now, deal with .pydistutils.cfg bugs
        # when they happen.
        nohome = ''
    cmd = ('%s setup.py %s clean --all'
           ' build --build-base="%s"'
           ' install --force --prefix="%s" --install-lib="%s"'
           ' --install-scripts="%s" %s >%s 2>&1'
           % (sys.executable, pure, os.path.join(HGTMP, "build"),
              INST, PYTHONDIR, BINDIR, nohome, installerrs))
    vlog("# Running", cmd)
    if os.system(cmd) == 0:
        if not options.verbose:
            os.remove(installerrs)
    else:
        # dump the captured installer output and give up
        f = open(installerrs)
        for line in f:
            print line,
        f.close()
        sys.exit(1)
    os.chdir(TESTDIR)

    usecorrectpython()

    # some tests expect a diffstat tool; provide a minimal stand-in
    vlog("# Installing dummy diffstat")
    f = open(os.path.join(BINDIR, 'diffstat'), 'w')
    f.write('#!' + sys.executable + '\n'
            'import sys\n'
            'files = 0\n'
            'for line in sys.stdin:\n'
            '    if line.startswith("diff "):\n'
            '        files += 1\n'
            'sys.stdout.write("files patched: %d\\n" % files)\n')
    f.close()
    os.chmod(os.path.join(BINDIR, 'diffstat'), 0700)

    if options.py3k_warnings and not options.anycoverage:
        # add the -3 switch to the installed hg script's shebang line
        vlog("# Updating hg command to enable Py3k Warnings switch")
        f = open(os.path.join(BINDIR, 'hg'), 'r')
        lines = [line.rstrip() for line in f]
        lines[0] += ' -3'
        f.close()
        f = open(os.path.join(BINDIR, 'hg'), 'w')
        for line in lines:
            f.write(line + '\n')
        f.close()

    hgbat = os.path.join(BINDIR, 'hg.bat')
    if os.path.isfile(hgbat):
        # hg.bat expects to be put in bin/scripts while run-tests.py
        # installation layout put it in bin/ directly. Fix it
        f = open(hgbat, 'rb')
        data = f.read()
        f.close()
        if '"%~dp0..\python" "%~dp0hg" %*' in data:
            data = data.replace('"%~dp0..\python" "%~dp0hg" %*',
                                '"%~dp0python" "%~dp0hg" %*')
            f = open(hgbat, 'wb')
            f.write(data)
            f.close()
        else:
            print 'WARNING: cannot fix hg.bat reference to python.exe'

    if options.anycoverage:
        # sitecustomize.py starts coverage in every spawned python
        custom = os.path.join(TESTDIR, 'sitecustomize.py')
        target = os.path.join(PYTHONDIR, 'sitecustomize.py')
        vlog('# Installing coverage trigger to %s' % target)
        shutil.copyfile(custom, target)
        rc = os.path.join(TESTDIR, '.coveragerc')
        vlog('# Installing coverage rc to %s' % rc)
        os.environ['COVERAGE_PROCESS_START'] = rc
        fn = os.path.join(INST, '..', '.coverage')
        os.environ['COVERAGE_FILE'] = fn
484
484
def outputcoverage(options):
    """Produce text, HTML and annotated coverage reports after a test run."""

    vlog('# Producing coverage report')
    os.chdir(PYTHONDIR)

    def coverage(*args):
        # shell out to the external 'coverage' tool with the given arguments
        cmdline = 'coverage %s' % ' '.join(args)
        vlog('# Running: %s' % cmdline)
        os.system(cmdline)

    if options.child:
        # child runners do not produce a report of their own
        return

    coverage('-c')
    omit = ','.join(os.path.join(x, '*') for x in [BINDIR, TESTDIR])
    coverage('-i', '-r', '"--omit=%s"' % omit) # report
    if options.htmlcov:
        htmldir = os.path.join(TESTDIR, 'htmlcov')
        coverage('-i', '-b', '"--directory=%s"' % htmldir, '"--omit=%s"' % omit)
    if options.annotate:
        adir = os.path.join(TESTDIR, 'annotated')
        if not os.path.isdir(adir):
            os.mkdir(adir)
        coverage('-i', '-a', '"--directory=%s"' % adir, '"--omit=%s"' % omit)
509
509
def pytest(test, wd, options, replacements):
    """Run a Python test script with the configured interpreter.

    Returns (exitcode, output) from run().
    """
    if options.py3k_warnings:
        switch = ' -3'
    else:
        switch = ''
    cmd = '%s%s "%s"' % (PYTHON, switch, test)
    vlog("# Running", cmd)
    return run(cmd, wd, options, replacements)
515
515
def shtest(test, wd, options, replacements):
    """Run a shell-script test under options.shell.

    Returns (exitcode, output) from run().
    """
    cmd = options.shell + ' "' + test + '"'
    vlog("# Running", cmd)
    return run(cmd, wd, options, replacements)
520
520
# Test output lines containing control or non-ASCII bytes cannot appear
# verbatim in a .t file; they are rewritten with \xNN escapes (and tagged
# " (esc)" by the caller).  needescape detects such lines; escapesub
# additionally matches backslash so escaped output round-trips.
needescape = re.compile(r'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
escapesub = re.compile(r'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
# every byte maps to \xNN; backslash and CR get the shorter readable forms
escapemap = dict((chr(code), r'\x%02x' % code) for code in range(256))
escapemap['\\'] = '\\\\'
escapemap['\r'] = r'\r'
def escapef(m):
    """Return the escaped replacement for a single matched character."""
    return escapemap[m.group(0)]
def stringescape(s):
    """Escape unprintable characters (and backslash) in s."""
    return escapesub(escapef, s)
529
529
def rematch(el, l):
    """Match line l against regular expression el.

    The pattern is anchored with \\Z so it must consume the entire line.
    Returns the match object, None on no match, or False when el is not
    a valid regular expression.
    """
    pattern = el + r'\Z'
    try:
        return re.match(pattern, l)
    except re.error:
        # el is an invalid regex
        return False

def globmatch(el, l):
    """Match line l against glob pattern el.

    Only * and ? are special, plus / which also matches \\ on Windows.
    Escaping of these characters is supported.  The glob is translated
    to a regular expression and handed to rematch().
    """
    pieces = []
    pos, end = 0, len(el)
    while pos < end:
        ch = el[pos]
        pos += 1
        if ch == '\\' and el[pos] in '*?\\/':
            # escaped special character: keep it literal
            pieces.append(el[pos - 1:pos + 1])
            pos += 1
        elif ch == '*':
            pieces.append('.*')
        elif ch == '?':
            pieces.append('.')
        elif ch == '/' and os.name == 'nt':
            # on Windows, / in expected output also matches backslash
            pieces.append('[/\\\\]')
        else:
            pieces.append(re.escape(ch))
    return rematch(''.join(pieces), l)

def linematch(el, l):
    """Return True if output line l satisfies expected line el.

    el may carry a " (re)", " (glob)" or " (esc)" annotation before its
    trailing newline; otherwise an exact comparison is used.
    """
    if el == l: # perfect match (fast)
        return True
    if not el:
        return False
    if el.endswith(" (re)\n") and rematch(el[:-6] + '\n', l):
        return True
    if el.endswith(" (glob)\n") and globmatch(el[:-8] + '\n', l):
        return True
    if el.endswith(" (esc)\n"):
        expanded = el[:-7].decode('string-escape')
        if expanded + '\n' == l:
            return True
        # on Windows, also tolerate missing carriage returns
        if expanded.replace('\r', '') + '\n' == l and os.name == 'nt':
            return True
    return False
571
571
def tsttest(test, wd, options, replacements):
    """Run a unified (.t) test: translate it to a shell script, execute it,
    and merge the actual output back into the test's structure.

    Returns (exitcode, postout) where postout is the merged test body, or
    (exitcode, output) unmerged when the test was skipped or run in --debug
    mode (output is None with --debug).
    """
    # We generate a shell script which outputs unique markers to line
    # up script results with our source. These markers include input
    # line number and the last return code
    salt = "SALT" + str(time.time())
    def addsalt(line, inpython):
        # emit a marker carrying the source line number; inside a Python
        # heredoc block we cannot use echo/$?, so a literal marker line
        # with a fixed 0 status is appended instead
        if inpython:
            script.append('%s %d 0\n' % (salt, line))
        else:
            script.append('echo %s %s $?\n' % (salt, line))

    # After we run the shell script, we re-unify the script output
    # with non-active parts of the source, with synchronization by our
    # SALT line number markers. The after table contains the
    # non-active components, ordered by line number
    after = {}
    pos = prepos = -1

    # Expected shellscript output
    expected = {}

    # We keep track of whether or not we're in a Python block so we
    # can generate the surrounding doctest magic
    inpython = False

    f = open(test)
    t = f.readlines()
    f.close()

    script = []
    if options.debug:
        script.append('set -x\n')
    if os.getenv('MSYSTEM'):
        # MSYS/MinGW: make pwd print Windows-style paths
        script.append('alias pwd="pwd -W"\n')
    for n, l in enumerate(t):
        if not l.endswith('\n'):
            l += '\n'
        if l.startswith('  >>> '): # python inlines
            after.setdefault(pos, []).append(l)
            prepos = pos
            pos = n
            if not inpython:
                # we've just entered a Python block, add the header
                inpython = True
                addsalt(prepos, False) # make sure we report the exit code
                script.append('%s -m heredoctest <<EOF\n' % PYTHON)
            addsalt(n, True)
            script.append(l[2:])
        # NOTE(review): this is 'if', not 'elif' -- a '  >>> ' line
        # therefore also reaches the final '  ' (results) branch below and
        # gets appended to `expected`; confirm against heredoctest whether
        # that is intended or this should be 'elif'.
        if l.startswith('  ... '): # python inlines
            after.setdefault(prepos, []).append(l)
            script.append(l[2:])
        elif l.startswith('  $ '): # commands
            if inpython:
                # close the open Python heredoc before the shell command
                script.append("EOF\n")
                inpython = False
            after.setdefault(pos, []).append(l)
            prepos = pos
            pos = n
            addsalt(n, False)
            script.append(l[4:])
        elif l.startswith('  > '): # continuations
            after.setdefault(prepos, []).append(l)
            script.append(l[4:])
        elif l.startswith('  '): # results
            # queue up a list of expected results
            expected.setdefault(pos, []).append(l[2:])
        else:
            if inpython:
                script.append("EOF\n")
                inpython = False
            # non-command/result - queue up for merged output
            after.setdefault(pos, []).append(l)

    if inpython:
        script.append("EOF\n")
    # final marker so trailing output and exit code are captured
    addsalt(n + 1, False)

    # Write out the script and execute it
    fd, name = tempfile.mkstemp(suffix='hg-tst')
    try:
        for l in script:
            os.write(fd, l)
        os.close(fd)

        cmd = '%s "%s"' % (options.shell, name)
        vlog("# Running", cmd)
        exitcode, output = run(cmd, wd, options, replacements)
        # do not merge output if skipped, return hghave message instead
        # similarly, with --debug, output is None
        if exitcode == SKIPPED_STATUS or output is None:
            return exitcode, output
    finally:
        os.remove(name)

    # Merge the script output back into a unified test

    pos = -1
    postout = []
    ret = 0
    for n, l in enumerate(output):
        lout, lcmd = l, None
        if salt in l:
            # marker line: text before the salt is real output, text after
            # carries "<source line> <exit status>"
            lout, lcmd = l.split(salt, 1)

        if lout:
            if lcmd:
                # output block had no trailing newline, clean up
                lout += ' (no-eol)\n'

            # find the expected output at the current position
            el = None
            if pos in expected and expected[pos]:
                el = expected[pos].pop(0)

            if linematch(el, lout):
                # keep the (possibly annotated) expected line as written
                postout.append("  " + el)
            else:
                if needescape(lout):
                    lout = stringescape(lout.rstrip('\n')) + " (esc)\n"
                postout.append("  " + lout) # let diff deal with it

        if lcmd:
            # add on last return code
            ret = int(lcmd.split()[1])
            if ret != 0:
                postout.append("  [%s]\n" % ret)
            if pos in after:
                # merge in non-active test bits
                postout += after.pop(pos)
            pos = int(lcmd.split()[0])

    if pos in after:
        # trailing non-active lines after the last command
        postout += after.pop(pos)

    return exitcode, postout
707
707
# WIFEXITED is absent on some platforms (e.g. Windows); fall back to a
# predicate that is always False so the raw status is reported unchanged.
wifexited = getattr(os, "WIFEXITED", lambda x: False)
def run(cmd, wd, options, replacements):
    """Run command in a sub-process, capturing the output (stdout and stderr).
    Return a tuple (exitcode, output). output is None in debug mode."""
    # TODO: Use subprocess.Popen if we're running on Python 2.4
    if options.debug:
        # debug mode: let the child write straight to our stdout/stderr
        proc = subprocess.Popen(cmd, shell=True, cwd=wd)
        ret = proc.wait()
        return (ret, None)

    proc = Popen4(cmd, wd, options.timeout)
    def cleanup():
        # kill the child; force a non-zero status if it exited cleanly so
        # the interruption is not mistaken for success, then reap daemons
        terminate(proc)
        ret = proc.wait()
        if ret == 0:
            ret = signal.SIGTERM << 8
        killdaemons()
        return ret

    output = ''
    proc.tochild.close()

    try:
        output = proc.fromchild.read()
    except KeyboardInterrupt:
        vlog('# Handling keyboard interrupt')
        cleanup()
        raise

    ret = proc.wait()
    if wifexited(ret):
        # unpack the real exit code from the wait status
        ret = os.WEXITSTATUS(ret)

    if proc.timeout:
        # NOTE(review): presumably set by Popen4 when the time limit fired
        # -- confirm against the Popen4 implementation
        ret = 'timeout'

    if ret:
        # a failing test may have left daemons running; clean them up
        killdaemons()

    # apply caller-provided regex substitutions (ports, temp paths, ...)
    for s, r in replacements:
        output = re.sub(s, r, output)
    return ret, splitnewlines(output)
750
750
def runone(options, test):
    '''Run a single named test and record its outcome in the global
    results table.

    tristate output:
    None -> skipped
    True -> passed
    False -> failed'''

    global results, resultslock, iolock

    testpath = os.path.join(TESTDIR, test)

    def result(l, e):
        # append outcome e under result category l, guarded by the lock
        resultslock.acquire()
        results[l].append(e)
        resultslock.release()

    def skip(msg):
        # record a skip, or print it immediately in verbose mode
        if not options.verbose:
            result('s', (test, msg))
        else:
            iolock.acquire()
            print "\nSkipping %s: %s" % (testpath, msg)
            iolock.release()
        return None

    def fail(msg, ret):
        # record a failure; interactively offer to accept the new output
        if not options.nodiff:
            iolock.acquire()
            print "\nERROR: %s %s" % (testpath, msg)
            iolock.release()
        if (not ret and options.interactive
            and os.path.exists(testpath + ".err")):
            iolock.acquire()
            print "Accept this change? [n] ",
            answer = sys.stdin.readline().strip()
            iolock.release()
            if answer.lower() in "y yes".split():
                # promote the .err output to the new reference
                if test.endswith(".t"):
                    rename(testpath + ".err", testpath)
                else:
                    rename(testpath + ".err", testpath + ".out")
                result('p', test)
                return
        result('f', (test, msg))

    def success():
        result('p', test)

    def ignore(msg):
        result('i', (test, msg))

    # only files named test-* with a recognized (or no) extension count
    if (os.path.basename(test).startswith("test-") and '~' not in test and
        ('.' not in test or test.endswith('.py') or
         test.endswith('.bat') or test.endswith('.t'))):
        if not os.path.exists(test):
            skip("doesn't exist")
            return None
    else:
        vlog('# Test file', test, 'not supported, ignoring')
        return None # not a supported test, don't record

    # whitelisted tests bypass blacklist/retest/keyword filtering
    if not (options.whitelisted and test in options.whitelisted):
        if options.blacklist and test in options.blacklist:
            skip("blacklisted")
            return None

        if options.retest and not os.path.exists(test + ".err"):
            ignore("not retesting")
            return None

        if options.keywords:
            fp = open(test)
            t = fp.read().lower() + test.lower()
            fp.close()
            for k in options.keywords.lower().split():
                if k in t:
                    break
            else:
                ignore("doesn't match keyword")
                return None

    vlog("# Test", test)

    # create a fresh hgrc
    hgrc = open(HGRCPATH, 'w+')
    hgrc.write('[ui]\n')
    hgrc.write('slash = True\n')
    hgrc.write('[defaults]\n')
    hgrc.write('backout = -d "0 0"\n')
    hgrc.write('commit = -d "0 0"\n')
    hgrc.write('tag = -d "0 0"\n')
    if options.inotify:
        hgrc.write('[extensions]\n')
        hgrc.write('inotify=\n')
        hgrc.write('[inotify]\n')
        hgrc.write('pidfile=%s\n' % DAEMON_PIDS)
        hgrc.write('appendpid=True\n')
    if options.extra_config_opt:
        # each opt is "section.key=value"; write it into the fresh hgrc
        for opt in options.extra_config_opt:
            section, key = opt.split('.', 1)
            assert '=' in key, ('extra config opt %s must '
                                'have an = for assignment' % opt)
            hgrc.write('[%s]\n%s\n' % (section, key))
    hgrc.close()

    ref = os.path.join(TESTDIR, test+".out")
    err = os.path.join(TESTDIR, test+".err")
    if os.path.exists(err):
        os.remove(err)       # Remove any previous output files
    try:
        # peek at the first line to detect a python shebang
        tf = open(testpath)
        firstline = tf.readline().rstrip()
        tf.close()
    except IOError:
        firstline = ''
    lctest = test.lower()

    # choose the runner from the extension / shebang
    if lctest.endswith('.py') or firstline == '#!/usr/bin/env python':
        runner = pytest
    elif lctest.endswith('.t'):
        runner = tsttest
        ref = testpath
    else:
        # do not try to run non-executable programs
        if not os.access(testpath, os.X_OK):
            return skip("not executable")
        runner = shtest

    # Make a tmp subdirectory to work in
    testtmp = os.environ["TESTTMP"] = os.environ["HOME"] = \
        os.path.join(HGTMP, os.path.basename(test)).replace('\\', '/')

    # normalize ports and temp paths in output so it is reproducible
    replacements = [
        (r':%s\b' % options.port, ':$HGPORT'),
        (r':%s\b' % (options.port + 1), ':$HGPORT1'),
        (r':%s\b' % (options.port + 2), ':$HGPORT2'),
        ]
    if os.name == 'nt':
        replacements.append((r'\r\n', '\n'))
        # build a case-insensitive, slash-agnostic regex for the temp path
        replacements.append(
            (''.join(c.isalpha() and '[%s%s]' % (c.lower(), c.upper()) or
                     c in '/\\' and r'[/\\]' or
                     c.isdigit() and c or
                     '\\' + c
                     for c in testtmp), '$TESTTMP'))
    else:
        replacements.append((re.escape(testtmp), '$TESTTMP'))

    os.mkdir(testtmp)
    ret, out = runner(testpath, testtmp, options, replacements)
    vlog("# Ret was:", ret)

    mark = '.'

    skipped = (ret == SKIPPED_STATUS)

    # If we're not in --debug mode and reference output file exists,
    # check test output against it.
    if options.debug:
        refout = None                    # to match "out is None"
    elif os.path.exists(ref):
        f = open(ref, "r")
        refout = list(splitnewlines(f.read()))
        f.close()
    else:
        refout = []

    if (ret != 0 or out != refout) and not skipped and not options.debug:
        # Save errors to a file for diagnosis
        f = open(err, "wb")
        for line in out:
            f.write(line)
        f.close()

    if skipped:
        mark = 's'
        if out is None:                 # debug mode: nothing to parse
            missing = ['unknown']
            failed = None
        else:
            missing, failed = parsehghaveoutput(out)
        if not missing:
            missing = ['irrelevant']
        if failed:
            # a broken hghave check is a failure, not a skip
            fail("hghave failed checking for %s" % failed[-1], ret)
            skipped = False
        else:
            skip(missing[-1])
    elif ret == 'timeout':
        mark = 't'
        fail("timed out", ret)
    elif out != refout:
        mark = '!'
        if not options.nodiff:
            iolock.acquire()
            if options.view:
                # hand ref/err to an external diff viewer
                os.system("%s %s %s" % (options.view, ref, err))
            else:
                showdiff(refout, out, ref, err)
            iolock.release()
        if ret:
            fail("output changed and returned error code %d" % ret, ret)
        else:
            fail("output changed", ret)
        ret = 1
    elif ret:
        mark = '!'
        fail("returned error code %d" % ret, ret)
    else:
        success()

    if not options.verbose:
        # progress indicator: one mark character per test
        iolock.acquire()
        sys.stdout.write(mark)
        sys.stdout.flush()
        iolock.release()

    killdaemons()

    if not options.keep_tmpdir:
        shutil.rmtree(testtmp, True)
    if skipped:
        return None
    return ret == 0
974
974
# cached result of _gethgpath(); stays None until first computed
_hgpath = None

def _gethgpath():
    """Return the path to the mercurial package that is actually found by
    the current Python interpreter."""
    global _hgpath
    if _hgpath is None:
        # ask the test interpreter itself where it imports mercurial from
        cmd = '%s -c "import mercurial; print mercurial.__path__[0]"'
        fp = os.popen(cmd % PYTHON)
        try:
            _hgpath = fp.read().strip()
        finally:
            fp.close()
    return _hgpath
991
991
def _checkhglib(verb):
    """Ensure that the 'mercurial' package imported by python is
    the one we expect it to be. If not, print a warning to stderr."""
    expecthg = os.path.join(PYTHONDIR, 'mercurial')
    actualhg = _gethgpath()
    # compare normalized absolute paths, but report the raw ones
    if os.path.abspath(actualhg) == os.path.abspath(expecthg):
        return
    sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
                     ' (expected %s)\n'
                     % (verb, actualhg, expecthg))
1001
1001
1002 def runchildren(options, tests):
1002 def runchildren(options, tests):
1003 if INST:
1003 if INST:
1004 installhg(options)
1004 installhg(options)
1005 _checkhglib("Testing")
1005 _checkhglib("Testing")
1006
1006
1007 optcopy = dict(options.__dict__)
1007 optcopy = dict(options.__dict__)
1008 optcopy['jobs'] = 1
1008 optcopy['jobs'] = 1
1009
1009
1010 # Because whitelist has to override keyword matches, we have to
1010 # Because whitelist has to override keyword matches, we have to
1011 # actually load the whitelist in the children as well, so we allow
1011 # actually load the whitelist in the children as well, so we allow
1012 # the list of whitelist files to pass through and be parsed in the
1012 # the list of whitelist files to pass through and be parsed in the
1013 # children, but not the dict of whitelisted tests resulting from
1013 # children, but not the dict of whitelisted tests resulting from
1014 # the parse, used here to override blacklisted tests.
1014 # the parse, used here to override blacklisted tests.
1015 whitelist = optcopy['whitelisted'] or []
1015 whitelist = optcopy['whitelisted'] or []
1016 del optcopy['whitelisted']
1016 del optcopy['whitelisted']
1017
1017
1018 blacklist = optcopy['blacklist'] or []
1018 blacklist = optcopy['blacklist'] or []
1019 del optcopy['blacklist']
1019 del optcopy['blacklist']
1020 blacklisted = []
1020 blacklisted = []
1021
1021
1022 if optcopy['with_hg'] is None:
1022 if optcopy['with_hg'] is None:
1023 optcopy['with_hg'] = os.path.join(BINDIR, "hg")
1023 optcopy['with_hg'] = os.path.join(BINDIR, "hg")
1024 optcopy.pop('anycoverage', None)
1024 optcopy.pop('anycoverage', None)
1025
1025
1026 opts = []
1026 opts = []
1027 for opt, value in optcopy.iteritems():
1027 for opt, value in optcopy.iteritems():
1028 name = '--' + opt.replace('_', '-')
1028 name = '--' + opt.replace('_', '-')
1029 if value is True:
1029 if value is True:
1030 opts.append(name)
1030 opts.append(name)
1031 elif isinstance(value, list):
1031 elif isinstance(value, list):
1032 for v in value:
1032 for v in value:
1033 opts.append(name + '=' + str(v))
1033 opts.append(name + '=' + str(v))
1034 elif value is not None:
1034 elif value is not None:
1035 opts.append(name + '=' + str(value))
1035 opts.append(name + '=' + str(value))
1036
1036
1037 tests.reverse()
1037 tests.reverse()
1038 jobs = [[] for j in xrange(options.jobs)]
1038 jobs = [[] for j in xrange(options.jobs)]
1039 while tests:
1039 while tests:
1040 for job in jobs:
1040 for job in jobs:
1041 if not tests:
1041 if not tests:
1042 break
1042 break
1043 test = tests.pop()
1043 test = tests.pop()
1044 if test not in whitelist and test in blacklist:
1044 if test not in whitelist and test in blacklist:
1045 blacklisted.append(test)
1045 blacklisted.append(test)
1046 else:
1046 else:
1047 job.append(test)
1047 job.append(test)
1048 fps = {}
1048 fps = {}
1049
1049
1050 for j, job in enumerate(jobs):
1050 for j, job in enumerate(jobs):
1051 if not job:
1051 if not job:
1052 continue
1052 continue
1053 rfd, wfd = os.pipe()
1053 rfd, wfd = os.pipe()
1054 childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
1054 childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
1055 childtmp = os.path.join(HGTMP, 'child%d' % j)
1055 childtmp = os.path.join(HGTMP, 'child%d' % j)
1056 childopts += ['--tmpdir', childtmp]
1056 childopts += ['--tmpdir', childtmp]
1057 cmdline = [PYTHON, sys.argv[0]] + opts + childopts + job
1057 cmdline = [PYTHON, sys.argv[0]] + opts + childopts + job
1058 vlog(' '.join(cmdline))
1058 vlog(' '.join(cmdline))
1059 fps[os.spawnvp(os.P_NOWAIT, cmdline[0], cmdline)] = os.fdopen(rfd, 'r')
1059 fps[os.spawnvp(os.P_NOWAIT, cmdline[0], cmdline)] = os.fdopen(rfd, 'r')
1060 os.close(wfd)
1060 os.close(wfd)
1061 signal.signal(signal.SIGINT, signal.SIG_IGN)
1061 signal.signal(signal.SIGINT, signal.SIG_IGN)
1062 failures = 0
1062 failures = 0
1063 tested, skipped, failed = 0, 0, 0
1063 tested, skipped, failed = 0, 0, 0
1064 skips = []
1064 skips = []
1065 fails = []
1065 fails = []
1066 while fps:
1066 while fps:
1067 pid, status = os.wait()
1067 pid, status = os.wait()
1068 fp = fps.pop(pid)
1068 fp = fps.pop(pid)
1069 l = fp.read().splitlines()
1069 l = fp.read().splitlines()
1070 try:
1070 try:
1071 test, skip, fail = map(int, l[:3])
1071 test, skip, fail = map(int, l[:3])
1072 except ValueError:
1072 except ValueError:
1073 test, skip, fail = 0, 0, 0
1073 test, skip, fail = 0, 0, 0
1074 split = -fail or len(l)
1074 split = -fail or len(l)
1075 for s in l[3:split]:
1075 for s in l[3:split]:
1076 skips.append(s.split(" ", 1))
1076 skips.append(s.split(" ", 1))
1077 for s in l[split:]:
1077 for s in l[split:]:
1078 fails.append(s.split(" ", 1))
1078 fails.append(s.split(" ", 1))
1079 tested += test
1079 tested += test
1080 skipped += skip
1080 skipped += skip
1081 failed += fail
1081 failed += fail
1082 vlog('pid %d exited, status %d' % (pid, status))
1082 vlog('pid %d exited, status %d' % (pid, status))
1083 failures |= status
1083 failures |= status
1084 print
1084 print
1085 skipped += len(blacklisted)
1085 skipped += len(blacklisted)
1086 if not options.noskips:
1086 if not options.noskips:
1087 for s in skips:
1087 for s in skips:
1088 print "Skipped %s: %s" % (s[0], s[1])
1088 print "Skipped %s: %s" % (s[0], s[1])
1089 for s in blacklisted:
1089 for s in blacklisted:
1090 print "Skipped %s: blacklisted" % s
1090 print "Skipped %s: blacklisted" % s
1091 for s in fails:
1091 for s in fails:
1092 print "Failed %s: %s" % (s[0], s[1])
1092 print "Failed %s: %s" % (s[0], s[1])
1093
1093
1094 _checkhglib("Tested")
1094 _checkhglib("Tested")
1095 print "# Ran %d tests, %d skipped, %d failed." % (
1095 print "# Ran %d tests, %d skipped, %d failed." % (
1096 tested, skipped, failed)
1096 tested, skipped, failed)
1097
1097
1098 if options.anycoverage:
1098 if options.anycoverage:
1099 outputcoverage(options)
1099 outputcoverage(options)
1100 sys.exit(failures != 0)
1100 sys.exit(failures != 0)
1101
1101
1102 results = dict(p=[], f=[], s=[], i=[])
1102 results = dict(p=[], f=[], s=[], i=[])
1103 resultslock = threading.Lock()
1103 resultslock = threading.Lock()
1104 iolock = threading.Lock()
1104 iolock = threading.Lock()
1105
1105
1106 def runqueue(options, tests, results):
1106 def runqueue(options, tests, results):
1107 for test in tests:
1107 for test in tests:
1108 ret = runone(options, test)
1108 ret = runone(options, test)
1109 if options.first and ret is not None and not ret:
1109 if options.first and ret is not None and not ret:
1110 break
1110 break
1111
1111
1112 def runtests(options, tests):
1112 def runtests(options, tests):
1113 global DAEMON_PIDS, HGRCPATH
1113 global DAEMON_PIDS, HGRCPATH
1114 DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
1114 DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
1115 HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
1115 HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
1116
1116
1117 try:
1117 try:
1118 if INST:
1118 if INST:
1119 installhg(options)
1119 installhg(options)
1120 _checkhglib("Testing")
1120 _checkhglib("Testing")
1121
1121
1122 if options.restart:
1122 if options.restart:
1123 orig = list(tests)
1123 orig = list(tests)
1124 while tests:
1124 while tests:
1125 if os.path.exists(tests[0] + ".err"):
1125 if os.path.exists(tests[0] + ".err"):
1126 break
1126 break
1127 tests.pop(0)
1127 tests.pop(0)
1128 if not tests:
1128 if not tests:
1129 print "running all tests"
1129 print "running all tests"
1130 tests = orig
1130 tests = orig
1131
1131
1132 runqueue(options, tests, results)
1132 runqueue(options, tests, results)
1133
1133
1134 failed = len(results['f'])
1134 failed = len(results['f'])
1135 tested = len(results['p']) + failed
1135 tested = len(results['p']) + failed
1136 skipped = len(results['s'])
1136 skipped = len(results['s'])
1137 ignored = len(results['i'])
1137 ignored = len(results['i'])
1138
1138
1139 if options.child:
1139 if options.child:
1140 fp = os.fdopen(options.child, 'w')
1140 fp = os.fdopen(options.child, 'w')
1141 fp.write('%d\n%d\n%d\n' % (tested, skipped, failed))
1141 fp.write('%d\n%d\n%d\n' % (tested, skipped, failed))
1142 for s in results['s']:
1142 for s in results['s']:
1143 fp.write("%s %s\n" % s)
1143 fp.write("%s %s\n" % s)
1144 for s in results['f']:
1144 for s in results['f']:
1145 fp.write("%s %s\n" % s)
1145 fp.write("%s %s\n" % s)
1146 fp.close()
1146 fp.close()
1147 else:
1147 else:
1148 print
1148 print
1149 for s in results['s']:
1149 for s in results['s']:
1150 print "Skipped %s: %s" % s
1150 print "Skipped %s: %s" % s
1151 for s in results['f']:
1151 for s in results['f']:
1152 print "Failed %s: %s" % s
1152 print "Failed %s: %s" % s
1153 _checkhglib("Tested")
1153 _checkhglib("Tested")
1154 print "# Ran %d tests, %d skipped, %d failed." % (
1154 print "# Ran %d tests, %d skipped, %d failed." % (
1155 tested, skipped + ignored, failed)
1155 tested, skipped + ignored, failed)
1156
1156
1157 if options.anycoverage:
1157 if options.anycoverage:
1158 outputcoverage(options)
1158 outputcoverage(options)
1159 except KeyboardInterrupt:
1159 except KeyboardInterrupt:
1160 failed = True
1160 failed = True
1161 print "\ninterrupted!"
1161 print "\ninterrupted!"
1162
1162
1163 if failed:
1163 if failed:
1164 sys.exit(1)
1164 sys.exit(1)
1165
1165
1166 def main():
1166 def main():
1167 (options, args) = parseargs()
1167 (options, args) = parseargs()
1168 if not options.child:
1168 if not options.child:
1169 os.umask(022)
1169 os.umask(022)
1170
1170
1171 checktools()
1171 checktools()
1172
1172
1173 if len(args) == 0:
1173 if len(args) == 0:
1174 args = os.listdir(".")
1174 args = os.listdir(".")
1175 args.sort()
1175 args.sort()
1176
1176
1177 tests = args
1177 tests = args
1178
1178
1179 # Reset some environment variables to well-known values so that
1179 # Reset some environment variables to well-known values so that
1180 # the tests produce repeatable output.
1180 # the tests produce repeatable output.
1181 os.environ['LANG'] = os.environ['LC_ALL'] = os.environ['LANGUAGE'] = 'C'
1181 os.environ['LANG'] = os.environ['LC_ALL'] = os.environ['LANGUAGE'] = 'C'
1182 os.environ['TZ'] = 'GMT'
1182 os.environ['TZ'] = 'GMT'
1183 os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1183 os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1184 os.environ['CDPATH'] = ''
1184 os.environ['CDPATH'] = ''
1185 os.environ['COLUMNS'] = '80'
1185 os.environ['COLUMNS'] = '80'
1186 os.environ['GREP_OPTIONS'] = ''
1186 os.environ['GREP_OPTIONS'] = ''
1187 os.environ['http_proxy'] = ''
1187 os.environ['http_proxy'] = ''
1188 os.environ['no_proxy'] = ''
1188 os.environ['no_proxy'] = ''
1189 os.environ['NO_PROXY'] = ''
1189 os.environ['NO_PROXY'] = ''
1190 os.environ['TERM'] = 'xterm'
1190 os.environ['TERM'] = 'xterm'
1191
1191
1192 # unset env related to hooks
1192 # unset env related to hooks
1193 for k in os.environ.keys():
1193 for k in os.environ.keys():
1194 if k.startswith('HG_'):
1194 if k.startswith('HG_'):
1195 # can't remove on solaris
1195 # can't remove on solaris
1196 os.environ[k] = ''
1196 os.environ[k] = ''
1197 del os.environ[k]
1197 del os.environ[k]
1198
1198
1199 global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
1199 global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
1200 TESTDIR = os.environ["TESTDIR"] = os.getcwd()
1200 TESTDIR = os.environ["TESTDIR"] = os.getcwd()
1201 if options.tmpdir:
1201 if options.tmpdir:
1202 options.keep_tmpdir = True
1202 options.keep_tmpdir = True
1203 tmpdir = options.tmpdir
1203 tmpdir = options.tmpdir
1204 if os.path.exists(tmpdir):
1204 if os.path.exists(tmpdir):
1205 # Meaning of tmpdir has changed since 1.3: we used to create
1205 # Meaning of tmpdir has changed since 1.3: we used to create
1206 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
1206 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
1207 # tmpdir already exists.
1207 # tmpdir already exists.
1208 sys.exit("error: temp dir %r already exists" % tmpdir)
1208 sys.exit("error: temp dir %r already exists" % tmpdir)
1209
1209
1210 # Automatically removing tmpdir sounds convenient, but could
1210 # Automatically removing tmpdir sounds convenient, but could
1211 # really annoy anyone in the habit of using "--tmpdir=/tmp"
1211 # really annoy anyone in the habit of using "--tmpdir=/tmp"
1212 # or "--tmpdir=$HOME".
1212 # or "--tmpdir=$HOME".
1213 #vlog("# Removing temp dir", tmpdir)
1213 #vlog("# Removing temp dir", tmpdir)
1214 #shutil.rmtree(tmpdir)
1214 #shutil.rmtree(tmpdir)
1215 os.makedirs(tmpdir)
1215 os.makedirs(tmpdir)
1216 else:
1216 else:
1217 tmpdir = tempfile.mkdtemp('', 'hgtests.')
1217 tmpdir = tempfile.mkdtemp('', 'hgtests.')
1218 HGTMP = os.environ['HGTMP'] = os.path.realpath(tmpdir)
1218 HGTMP = os.environ['HGTMP'] = os.path.realpath(tmpdir)
1219 DAEMON_PIDS = None
1219 DAEMON_PIDS = None
1220 HGRCPATH = None
1220 HGRCPATH = None
1221
1221
1222 os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
1222 os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
1223 os.environ["HGMERGE"] = "internal:merge"
1223 os.environ["HGMERGE"] = "internal:merge"
1224 os.environ["HGUSER"] = "test"
1224 os.environ["HGUSER"] = "test"
1225 os.environ["HGENCODING"] = "ascii"
1225 os.environ["HGENCODING"] = "ascii"
1226 os.environ["HGENCODINGMODE"] = "strict"
1226 os.environ["HGENCODINGMODE"] = "strict"
1227 os.environ["HGPORT"] = str(options.port)
1227 os.environ["HGPORT"] = str(options.port)
1228 os.environ["HGPORT1"] = str(options.port + 1)
1228 os.environ["HGPORT1"] = str(options.port + 1)
1229 os.environ["HGPORT2"] = str(options.port + 2)
1229 os.environ["HGPORT2"] = str(options.port + 2)
1230
1230
1231 if options.with_hg:
1231 if options.with_hg:
1232 INST = None
1232 INST = None
1233 BINDIR = os.path.dirname(os.path.realpath(options.with_hg))
1233 BINDIR = os.path.dirname(os.path.realpath(options.with_hg))
1234
1234
1235 # This looks redundant with how Python initializes sys.path from
1235 # This looks redundant with how Python initializes sys.path from
1236 # the location of the script being executed. Needed because the
1236 # the location of the script being executed. Needed because the
1237 # "hg" specified by --with-hg is not the only Python script
1237 # "hg" specified by --with-hg is not the only Python script
1238 # executed in the test suite that needs to import 'mercurial'
1238 # executed in the test suite that needs to import 'mercurial'
1239 # ... which means it's not really redundant at all.
1239 # ... which means it's not really redundant at all.
1240 PYTHONDIR = BINDIR
1240 PYTHONDIR = BINDIR
1241 else:
1241 else:
1242 INST = os.path.join(HGTMP, "install")
1242 INST = os.path.join(HGTMP, "install")
1243 BINDIR = os.environ["BINDIR"] = os.path.join(INST, "bin")
1243 BINDIR = os.environ["BINDIR"] = os.path.join(INST, "bin")
1244 PYTHONDIR = os.path.join(INST, "lib", "python")
1244 PYTHONDIR = os.path.join(INST, "lib", "python")
1245
1245
1246 os.environ["BINDIR"] = BINDIR
1246 os.environ["BINDIR"] = BINDIR
1247 os.environ["PYTHON"] = PYTHON
1247 os.environ["PYTHON"] = PYTHON
1248
1248
1249 if not options.child:
1249 if not options.child:
1250 path = [BINDIR] + os.environ["PATH"].split(os.pathsep)
1250 path = [BINDIR] + os.environ["PATH"].split(os.pathsep)
1251 os.environ["PATH"] = os.pathsep.join(path)
1251 os.environ["PATH"] = os.pathsep.join(path)
1252
1252
1253 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
1253 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
1254 # can run .../tests/run-tests.py test-foo where test-foo
1254 # can run .../tests/run-tests.py test-foo where test-foo
1255 # adds an extension to HGRC
1255 # adds an extension to HGRC
1256 pypath = [PYTHONDIR, TESTDIR]
1256 pypath = [PYTHONDIR, TESTDIR]
1257 # We have to augment PYTHONPATH, rather than simply replacing
1257 # We have to augment PYTHONPATH, rather than simply replacing
1258 # it, in case external libraries are only available via current
1258 # it, in case external libraries are only available via current
1259 # PYTHONPATH. (In particular, the Subversion bindings on OS X
1259 # PYTHONPATH. (In particular, the Subversion bindings on OS X
1260 # are in /opt/subversion.)
1260 # are in /opt/subversion.)
1261 oldpypath = os.environ.get(IMPL_PATH)
1261 oldpypath = os.environ.get(IMPL_PATH)
1262 if oldpypath:
1262 if oldpypath:
1263 pypath.append(oldpypath)
1263 pypath.append(oldpypath)
1264 os.environ[IMPL_PATH] = os.pathsep.join(pypath)
1264 os.environ[IMPL_PATH] = os.pathsep.join(pypath)
1265
1265
1266 COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
1266 COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
1267
1267
1268 vlog("# Using TESTDIR", TESTDIR)
1268 vlog("# Using TESTDIR", TESTDIR)
1269 vlog("# Using HGTMP", HGTMP)
1269 vlog("# Using HGTMP", HGTMP)
1270 vlog("# Using PATH", os.environ["PATH"])
1270 vlog("# Using PATH", os.environ["PATH"])
1271 vlog("# Using", IMPL_PATH, os.environ[IMPL_PATH])
1271 vlog("# Using", IMPL_PATH, os.environ[IMPL_PATH])
1272
1272
1273 try:
1273 try:
1274 if len(tests) > 1 and options.jobs > 1:
1274 if len(tests) > 1 and options.jobs > 1:
1275 runchildren(options, tests)
1275 runchildren(options, tests)
1276 else:
1276 else:
1277 runtests(options, tests)
1277 runtests(options, tests)
1278 finally:
1278 finally:
1279 time.sleep(.1)
1279 time.sleep(.1)
1280 cleanup(options)
1280 cleanup(options)
1281
1281
1282 if __name__ == '__main__':
1282 if __name__ == '__main__':
1283 main()
1283 main()
@@ -1,345 +1,277 b''
1 $ check_code="$TESTDIR"/../contrib/check-code.py
1 $ check_code="$TESTDIR"/../contrib/check-code.py
2 $ cd "$TESTDIR"/..
2 $ cd "$TESTDIR"/..
3 $ if hg identify -q > /dev/null; then :
3 $ if hg identify -q > /dev/null; then :
4 > else
4 > else
5 > echo "skipped: not a Mercurial working dir" >&2
5 > echo "skipped: not a Mercurial working dir" >&2
6 > exit 80
6 > exit 80
7 > fi
7 > fi
8 $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
8 $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
9
9
10 $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0 || true
10 $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0 || true
11 contrib/perf.py:0:
11 contrib/perf.py:0:
12 > except:
12 > except:
13 warning: naked except clause
13 warning: naked except clause
14 contrib/perf.py:0:
14 contrib/perf.py:0:
15 > except:
15 > except:
16 warning: naked except clause
16 warning: naked except clause
17 contrib/setup3k.py:0:
17 contrib/setup3k.py:0:
18 > except:
18 > except:
19 warning: naked except clause
19 warning: naked except clause
20 contrib/setup3k.py:0:
21 > except:
22 warning: naked except clause
23 contrib/setup3k.py:0:
24 > except:
25 warning: naked except clause
26 warning: naked except clause
27 warning: naked except clause
28 contrib/shrink-revlog.py:0:
20 contrib/shrink-revlog.py:0:
29 > except:
21 > except:
30 warning: naked except clause
22 warning: naked except clause
31 hgext/convert/bzr.py:0:
23 hgext/convert/bzr.py:0:
32 > except:
24 > except:
33 warning: naked except clause
25 warning: naked except clause
34 hgext/convert/common.py:0:
35 > except:
36 warning: naked except clause
37 hgext/convert/common.py:0:
38 > except:
39 warning: naked except clause
40 warning: naked except clause
41 hgext/convert/convcmd.py:0:
26 hgext/convert/convcmd.py:0:
42 > except:
27 > except:
43 warning: naked except clause
28 warning: naked except clause
44 hgext/convert/cvsps.py:0:
29 hgext/convert/cvsps.py:0:
45 > ui.write('Ancestors: %s\n' % (','.join(r)))
30 > ui.write('Ancestors: %s\n' % (','.join(r)))
46 warning: unwrapped ui message
31 warning: unwrapped ui message
47 hgext/convert/cvsps.py:0:
32 hgext/convert/cvsps.py:0:
48 > ui.write('Parent: %d\n' % cs.parents[0].id)
33 > ui.write('Parent: %d\n' % cs.parents[0].id)
49 warning: unwrapped ui message
34 warning: unwrapped ui message
50 hgext/convert/cvsps.py:0:
35 hgext/convert/cvsps.py:0:
51 > ui.write('Parents: %s\n' %
36 > ui.write('Parents: %s\n' %
52 warning: unwrapped ui message
37 warning: unwrapped ui message
53 hgext/convert/cvsps.py:0:
38 hgext/convert/cvsps.py:0:
54 > except:
55 warning: naked except clause
56 hgext/convert/cvsps.py:0:
57 > ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
39 > ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
58 warning: unwrapped ui message
40 warning: unwrapped ui message
59 hgext/convert/cvsps.py:0:
41 hgext/convert/cvsps.py:0:
60 > ui.write('Author: %s\n' % cs.author)
42 > ui.write('Author: %s\n' % cs.author)
61 warning: unwrapped ui message
43 warning: unwrapped ui message
62 hgext/convert/cvsps.py:0:
44 hgext/convert/cvsps.py:0:
63 > ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
45 > ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
64 warning: unwrapped ui message
46 warning: unwrapped ui message
65 hgext/convert/cvsps.py:0:
47 hgext/convert/cvsps.py:0:
66 > ui.write('Date: %s\n' % util.datestr(cs.date,
48 > ui.write('Date: %s\n' % util.datestr(cs.date,
67 warning: unwrapped ui message
49 warning: unwrapped ui message
68 hgext/convert/cvsps.py:0:
50 hgext/convert/cvsps.py:0:
69 > ui.write('Log:\n')
51 > ui.write('Log:\n')
70 warning: unwrapped ui message
52 warning: unwrapped ui message
71 hgext/convert/cvsps.py:0:
53 hgext/convert/cvsps.py:0:
72 > ui.write('Members: \n')
54 > ui.write('Members: \n')
73 warning: unwrapped ui message
55 warning: unwrapped ui message
74 hgext/convert/cvsps.py:0:
56 hgext/convert/cvsps.py:0:
75 > ui.write('PatchSet %d \n' % cs.id)
57 > ui.write('PatchSet %d \n' % cs.id)
76 warning: unwrapped ui message
58 warning: unwrapped ui message
77 hgext/convert/cvsps.py:0:
59 hgext/convert/cvsps.py:0:
78 > ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
60 > ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
79 warning: unwrapped ui message
61 warning: unwrapped ui message
80 hgext/convert/git.py:0:
62 hgext/convert/git.py:0:
81 > except:
63 > except:
82 warning: naked except clause
64 warning: naked except clause
83 hgext/convert/hg.py:0:
65 hgext/convert/hg.py:0:
84 > except:
66 > except:
85 warning: naked except clause
67 warning: naked except clause
86 warning: naked except clause
68 warning: naked except clause
87 hgext/convert/hg.py:0:
69 hgext/convert/hg.py:0:
88 > except:
70 > except:
89 warning: naked except clause
71 warning: naked except clause
90 hgext/convert/monotone.py:0:
72 hgext/convert/monotone.py:0:
91 > except:
92 warning: naked except clause
93 hgext/convert/monotone.py:0:
94 > except:
73 > except:
95 warning: naked except clause
74 warning: naked except clause
96 hgext/convert/subversion.py:0:
75 hgext/convert/subversion.py:0:
97 > except:
98 warning: naked except clause
99 hgext/convert/subversion.py:0:
100 > except:
76 > except:
101 warning: naked except clause
77 warning: naked except clause
102 hgext/gpg.py:0:
103 > except:
104 warning: naked except clause
105 hgext/hgcia.py:0:
106 > except:
107 warning: naked except clause
108 hgext/hgk.py:0:
78 hgext/hgk.py:0:
109 > ui.write("parent %s\n" % p)
79 > ui.write("parent %s\n" % p)
110 warning: unwrapped ui message
80 warning: unwrapped ui message
111 hgext/hgk.py:0:
81 hgext/hgk.py:0:
112 > ui.write('k=%s\nv=%s\n' % (name, value))
82 > ui.write('k=%s\nv=%s\n' % (name, value))
113 warning: unwrapped ui message
83 warning: unwrapped ui message
114 hgext/hgk.py:0:
84 hgext/hgk.py:0:
115 > ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
85 > ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
116 warning: unwrapped ui message
86 warning: unwrapped ui message
117 hgext/hgk.py:0:
87 hgext/hgk.py:0:
118 > ui.write("branch %s\n\n" % ctx.branch())
88 > ui.write("branch %s\n\n" % ctx.branch())
119 warning: unwrapped ui message
89 warning: unwrapped ui message
120 hgext/hgk.py:0:
90 hgext/hgk.py:0:
121 > ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
91 > ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
122 warning: unwrapped ui message
92 warning: unwrapped ui message
123 hgext/hgk.py:0:
93 hgext/hgk.py:0:
124 > ui.write("revision %d\n" % ctx.rev())
94 > ui.write("revision %d\n" % ctx.rev())
125 warning: unwrapped ui message
95 warning: unwrapped ui message
126 hgext/hgk.py:0:
96 hgext/hgk.py:0:
127 > ui.write("tree %s\n" % short(ctx.changeset()[0]))
97 > ui.write("tree %s\n" % short(ctx.changeset()[0]))
128 warning: unwrapped ui message
98 warning: unwrapped ui message
129 hgext/inotify/server.py:0:
130 > except:
131 warning: naked except clause
132 hgext/inotify/server.py:0:
133 > except:
134 warning: naked except clause
135 hgext/keyword.py:0:
99 hgext/keyword.py:0:
136 > ui.note("hg ci -m '%s'\n" % msg)
100 > ui.note("hg ci -m '%s'\n" % msg)
137 warning: unwrapped ui message
101 warning: unwrapped ui message
138 hgext/mq.py:0:
102 hgext/mq.py:0:
139 > except:
103 > except:
140 warning: naked except clause
104 warning: naked except clause
141 warning: naked except clause
142 hgext/mq.py:0:
105 hgext/mq.py:0:
143 > except:
106 > except:
144 warning: naked except clause
107 warning: naked except clause
145 warning: naked except clause
108 warning: naked except clause
146 warning: naked except clause
109 warning: naked except clause
147 warning: naked except clause
110 warning: naked except clause
148 hgext/mq.py:0:
111 hgext/mq.py:0:
149 > except:
112 > except:
150 warning: naked except clause
113 warning: naked except clause
151 hgext/mq.py:0:
114 hgext/mq.py:0:
152 > ui.write("mq: %s\n" % ', '.join(m))
115 > ui.write("mq: %s\n" % ', '.join(m))
153 warning: unwrapped ui message
116 warning: unwrapped ui message
154 hgext/patchbomb.py:0:
117 hgext/patchbomb.py:0:
155 > except:
156 warning: naked except clause
157 hgext/patchbomb.py:0:
158 > ui.write('Subject: %s\n' % subj)
118 > ui.write('Subject: %s\n' % subj)
159 warning: unwrapped ui message
119 warning: unwrapped ui message
160 hgext/patchbomb.py:0:
120 hgext/patchbomb.py:0:
161 > ui.write('From: %s\n' % sender)
121 > ui.write('From: %s\n' % sender)
162 warning: unwrapped ui message
122 warning: unwrapped ui message
163 hgext/zeroconf/__init__.py:0:
164 > except:
165 warning: naked except clause
166 warning: naked except clause
167 mercurial/commands.py:0:
123 mercurial/commands.py:0:
168 > ui.note('branch %s\n' % data)
124 > ui.note('branch %s\n' % data)
169 warning: unwrapped ui message
125 warning: unwrapped ui message
170 mercurial/commands.py:0:
126 mercurial/commands.py:0:
171 > ui.note('node %s\n' % str(data))
127 > ui.note('node %s\n' % str(data))
172 warning: unwrapped ui message
128 warning: unwrapped ui message
173 mercurial/commands.py:0:
129 mercurial/commands.py:0:
174 > ui.note('tag %s\n' % name)
130 > ui.note('tag %s\n' % name)
175 warning: unwrapped ui message
131 warning: unwrapped ui message
176 mercurial/commands.py:0:
132 mercurial/commands.py:0:
177 > ui.write("unpruned common: %s\n" % " ".join([short(n)
133 > ui.write("unpruned common: %s\n" % " ".join([short(n)
178 warning: unwrapped ui message
134 warning: unwrapped ui message
179 mercurial/commands.py:0:
135 mercurial/commands.py:0:
180 > except:
136 > except:
181 warning: naked except clause
137 warning: naked except clause
182 mercurial/commands.py:0:
138 mercurial/commands.py:0:
183 > ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
139 > ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
184 warning: unwrapped ui message
140 warning: unwrapped ui message
185 mercurial/commands.py:0:
141 mercurial/commands.py:0:
186 > ui.write("local is subset\n")
142 > ui.write("local is subset\n")
187 warning: unwrapped ui message
143 warning: unwrapped ui message
188 mercurial/commands.py:0:
144 mercurial/commands.py:0:
189 > ui.write("remote is subset\n")
145 > ui.write("remote is subset\n")
190 warning: unwrapped ui message
146 warning: unwrapped ui message
191 mercurial/commands.py:0:
147 mercurial/commands.py:0:
192 > ui.write('deltas against other : ' + fmt % pcfmt(numother,
148 > ui.write('deltas against other : ' + fmt % pcfmt(numother,
193 warning: unwrapped ui message
149 warning: unwrapped ui message
194 mercurial/commands.py:0:
150 mercurial/commands.py:0:
195 > ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
151 > ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
196 warning: unwrapped ui message
152 warning: unwrapped ui message
197 mercurial/commands.py:0:
153 mercurial/commands.py:0:
198 > ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
154 > ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
199 warning: unwrapped ui message
155 warning: unwrapped ui message
200 mercurial/commands.py:0:
156 mercurial/commands.py:0:
201 > except:
157 > except:
202 warning: naked except clause
158 warning: naked except clause
203 mercurial/commands.py:0:
159 mercurial/commands.py:0:
204 > ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
160 > ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
205 warning: unwrapped ui message
161 warning: unwrapped ui message
206 mercurial/commands.py:0:
162 mercurial/commands.py:0:
207 > ui.write("match: %s\n" % m(d[0]))
163 > ui.write("match: %s\n" % m(d[0]))
208 warning: unwrapped ui message
164 warning: unwrapped ui message
209 mercurial/commands.py:0:
165 mercurial/commands.py:0:
210 > ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
166 > ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
211 warning: unwrapped ui message
167 warning: unwrapped ui message
212 mercurial/commands.py:0:
168 mercurial/commands.py:0:
213 > ui.write('path %s\n' % k)
169 > ui.write('path %s\n' % k)
214 warning: unwrapped ui message
170 warning: unwrapped ui message
215 mercurial/commands.py:0:
171 mercurial/commands.py:0:
216 > ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
172 > ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
217 warning: unwrapped ui message
173 warning: unwrapped ui message
218 mercurial/commands.py:0:
174 mercurial/commands.py:0:
219 > ui.write("digraph G {\n")
175 > ui.write("digraph G {\n")
220 warning: unwrapped ui message
176 warning: unwrapped ui message
221 mercurial/commands.py:0:
177 mercurial/commands.py:0:
222 > ui.write("internal: %s %s\n" % d)
178 > ui.write("internal: %s %s\n" % d)
223 warning: unwrapped ui message
179 warning: unwrapped ui message
224 mercurial/commands.py:0:
180 mercurial/commands.py:0:
225 > ui.write("standard: %s\n" % util.datestr(d))
181 > ui.write("standard: %s\n" % util.datestr(d))
226 warning: unwrapped ui message
182 warning: unwrapped ui message
227 mercurial/commands.py:0:
183 mercurial/commands.py:0:
228 > ui.write('avg chain length : ' + fmt % avgchainlen)
184 > ui.write('avg chain length : ' + fmt % avgchainlen)
229 warning: unwrapped ui message
185 warning: unwrapped ui message
230 mercurial/commands.py:0:
186 mercurial/commands.py:0:
231 > ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
187 > ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
232 warning: unwrapped ui message
188 warning: unwrapped ui message
233 mercurial/commands.py:0:
189 mercurial/commands.py:0:
234 > ui.write('compression ratio : ' + fmt % compratio)
190 > ui.write('compression ratio : ' + fmt % compratio)
235 warning: unwrapped ui message
191 warning: unwrapped ui message
236 mercurial/commands.py:0:
192 mercurial/commands.py:0:
237 > ui.write('delta size (min/max/avg) : %d / %d / %d\n'
193 > ui.write('delta size (min/max/avg) : %d / %d / %d\n'
238 warning: unwrapped ui message
194 warning: unwrapped ui message
239 mercurial/commands.py:0:
195 mercurial/commands.py:0:
240 > ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
196 > ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
241 warning: unwrapped ui message
197 warning: unwrapped ui message
242 mercurial/commands.py:0:
198 mercurial/commands.py:0:
243 > ui.write('flags : %s\n' % ', '.join(flags))
199 > ui.write('flags : %s\n' % ', '.join(flags))
244 warning: unwrapped ui message
200 warning: unwrapped ui message
245 mercurial/commands.py:0:
201 mercurial/commands.py:0:
246 > ui.write('format : %d\n' % format)
202 > ui.write('format : %d\n' % format)
247 warning: unwrapped ui message
203 warning: unwrapped ui message
248 mercurial/commands.py:0:
204 mercurial/commands.py:0:
249 > ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
205 > ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
250 warning: unwrapped ui message
206 warning: unwrapped ui message
251 mercurial/commands.py:0:
207 mercurial/commands.py:0:
252 > ui.write('revision size : ' + fmt2 % totalsize)
208 > ui.write('revision size : ' + fmt2 % totalsize)
253 warning: unwrapped ui message
209 warning: unwrapped ui message
254 mercurial/commands.py:0:
210 mercurial/commands.py:0:
255 > ui.write('revisions : ' + fmt2 % numrevs)
211 > ui.write('revisions : ' + fmt2 % numrevs)
256 warning: unwrapped ui message
212 warning: unwrapped ui message
257 warning: unwrapped ui message
213 warning: unwrapped ui message
258 mercurial/commands.py:0:
214 mercurial/commands.py:0:
259 > ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
215 > ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
260 warning: unwrapped ui message
216 warning: unwrapped ui message
261 mercurial/dispatch.py:0:
217 mercurial/dispatch.py:0:
262 > except:
218 > except:
263 warning: naked except clause
219 warning: naked except clause
264 mercurial/dispatch.py:0:
220 mercurial/dispatch.py:0:
265 > except:
221 > except:
266 warning: naked except clause
222 warning: naked except clause
267 mercurial/hg.py:0:
223 mercurial/hg.py:0:
268 > except:
224 > except:
269 warning: naked except clause
225 warning: naked except clause
270 mercurial/keepalive.py:0:
226 mercurial/keepalive.py:0:
271 > except:
227 > except:
272 warning: naked except clause
228 warning: naked except clause
273 mercurial/keepalive.py:0:
274 > except:
275 warning: naked except clause
276 mercurial/localrepo.py:0:
229 mercurial/localrepo.py:0:
277 > except:
230 > except:
278 warning: naked except clause
231 warning: naked except clause
279 mercurial/patch.py:0:
232 mercurial/patch.py:0:
280 > except:
233 > except:
281 warning: naked except clause
234 warning: naked except clause
282 mercurial/repair.py:0:
235 mercurial/repair.py:0:
283 > except:
236 > except:
284 warning: naked except clause
237 warning: naked except clause
285 mercurial/repair.py:0:
238 mercurial/repair.py:0:
286 > except:
239 > except:
287 warning: naked except clause
240 warning: naked except clause
288 mercurial/sshrepo.py:0:
289 > except:
290 warning: naked except clause
291 mercurial/transaction.py:0:
241 mercurial/transaction.py:0:
292 > except:
242 > except:
293 warning: naked except clause
243 warning: naked except clause
294 mercurial/util.py:0:
244 mercurial/util.py:0:
295 > except:
296 warning: naked except clause
297 mercurial/util.py:0:
298 > except:
245 > except:
299 warning: naked except clause
246 warning: naked except clause
300 mercurial/verify.py:0:
247 mercurial/verify.py:0:
301 > except:
248 > except:
302 warning: naked except clause
249 warning: naked except clause
303 mercurial/verify.py:0:
250 mercurial/verify.py:0:
304 > except:
251 > except:
305 warning: naked except clause
252 warning: naked except clause
306 setup.py:0:
253 setup.py:0:
307 > except:
254 > except:
308 warning: naked except clause
255 warning: naked except clause
309 setup.py:0:
310 > except:
311 warning: naked except clause
312 warning: naked except clause
313 setup.py:0:
314 > except:
315 warning: naked except clause
316 warning: naked except clause
317 warning: naked except clause
318 tests/autodiff.py:0:
256 tests/autodiff.py:0:
319 > ui.write('data lost for: %s\n' % fn)
257 > ui.write('data lost for: %s\n' % fn)
320 warning: unwrapped ui message
258 warning: unwrapped ui message
321 tests/run-tests.py:0:
322 > except:
323 warning: naked except clause
324 tests/test-convert-mtn.t:0:
259 tests/test-convert-mtn.t:0:
325 > > function get_passphrase(keypair_id)
260 > > function get_passphrase(keypair_id)
326 don't use 'function', use old style
261 don't use 'function', use old style
327 tests/test-filecache.py:0:
328 > except:
329 warning: naked except clause
330 tests/test-import-git.t:0:
262 tests/test-import-git.t:0:
331 > > Mc\${NkU|\`?^000jF3jhEB
263 > > Mc\${NkU|\`?^000jF3jhEB
332 ^ must be quoted
264 ^ must be quoted
333 tests/test-import.t:0:
265 tests/test-import.t:0:
334 > > diff -Naur proj-orig/foo proj-new/foo
266 > > diff -Naur proj-orig/foo proj-new/foo
335 don't use 'diff -N'
267 don't use 'diff -N'
336 don't use 'diff -N'
268 don't use 'diff -N'
337 tests/test-schemes.t:0:
269 tests/test-schemes.t:0:
338 > > z = file:\$PWD/
270 > > z = file:\$PWD/
339 don't use $PWD, use `pwd`
271 don't use $PWD, use `pwd`
340 tests/test-ui-color.py:0:
272 tests/test-ui-color.py:0:
341 > testui.warn('warning\n')
273 > testui.warn('warning\n')
342 warning: unwrapped ui message
274 warning: unwrapped ui message
343 tests/test-ui-color.py:0:
275 tests/test-ui-color.py:0:
344 > testui.write('buffered\n')
276 > testui.write('buffered\n')
345 warning: unwrapped ui message
277 warning: unwrapped ui message
@@ -1,95 +1,95 b''
1 import sys, os, subprocess
1 import sys, os, subprocess
2
2
3 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
3 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
4 'cacheable']):
4 'cacheable']):
5 sys.exit(80)
5 sys.exit(80)
6
6
7 from mercurial import util, scmutil, extensions
7 from mercurial import util, scmutil, extensions
8
8
9 filecache = scmutil.filecache
9 filecache = scmutil.filecache
10
10
11 class fakerepo(object):
11 class fakerepo(object):
12 def __init__(self):
12 def __init__(self):
13 self._filecache = {}
13 self._filecache = {}
14
14
15 def join(self, p):
15 def join(self, p):
16 return p
16 return p
17
17
18 def sjoin(self, p):
18 def sjoin(self, p):
19 return p
19 return p
20
20
21 @filecache('x')
21 @filecache('x')
22 def cached(self):
22 def cached(self):
23 print 'creating'
23 print 'creating'
24
24
25 def invalidate(self):
25 def invalidate(self):
26 for k in self._filecache:
26 for k in self._filecache:
27 try:
27 try:
28 delattr(self, k)
28 delattr(self, k)
29 except AttributeError:
29 except AttributeError:
30 pass
30 pass
31
31
32 def basic(repo):
32 def basic(repo):
33 # file doesn't exist, calls function
33 # file doesn't exist, calls function
34 repo.cached
34 repo.cached
35
35
36 repo.invalidate()
36 repo.invalidate()
37 # file still doesn't exist, uses cache
37 # file still doesn't exist, uses cache
38 repo.cached
38 repo.cached
39
39
40 # create empty file
40 # create empty file
41 f = open('x', 'w')
41 f = open('x', 'w')
42 f.close()
42 f.close()
43 repo.invalidate()
43 repo.invalidate()
44 # should recreate the object
44 # should recreate the object
45 repo.cached
45 repo.cached
46
46
47 f = open('x', 'w')
47 f = open('x', 'w')
48 f.write('a')
48 f.write('a')
49 f.close()
49 f.close()
50 repo.invalidate()
50 repo.invalidate()
51 # should recreate the object
51 # should recreate the object
52 repo.cached
52 repo.cached
53
53
54 repo.invalidate()
54 repo.invalidate()
55 # stats file again, nothing changed, reuses object
55 # stats file again, nothing changed, reuses object
56 repo.cached
56 repo.cached
57
57
58 # atomic replace file, size doesn't change
58 # atomic replace file, size doesn't change
59 # hopefully st_mtime doesn't change as well so this doesn't use the cache
59 # hopefully st_mtime doesn't change as well so this doesn't use the cache
60 # because of inode change
60 # because of inode change
61 f = scmutil.opener('.')('x', 'w', atomictemp=True)
61 f = scmutil.opener('.')('x', 'w', atomictemp=True)
62 f.write('b')
62 f.write('b')
63 f.close()
63 f.close()
64
64
65 repo.invalidate()
65 repo.invalidate()
66 repo.cached
66 repo.cached
67
67
68 def fakeuncacheable():
68 def fakeuncacheable():
69 def wrapcacheable(orig, *args, **kwargs):
69 def wrapcacheable(orig, *args, **kwargs):
70 return False
70 return False
71
71
72 def wrapinit(orig, *args, **kwargs):
72 def wrapinit(orig, *args, **kwargs):
73 pass
73 pass
74
74
75 originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
75 originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
76 origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
76 origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
77 wrapcacheable)
77 wrapcacheable)
78
78
79 try:
79 try:
80 os.remove('x')
80 os.remove('x')
81 except:
81 except OSError:
82 pass
82 pass
83
83
84 basic(fakerepo())
84 basic(fakerepo())
85
85
86 util.cachestat.cacheable = origcacheable
86 util.cachestat.cacheable = origcacheable
87 util.cachestat.__init__ = originit
87 util.cachestat.__init__ = originit
88
88
89 print 'basic:'
89 print 'basic:'
90 print
90 print
91 basic(fakerepo())
91 basic(fakerepo())
92 print
92 print
93 print 'fakeuncacheable:'
93 print 'fakeuncacheable:'
94 print
94 print
95 fakeuncacheable()
95 fakeuncacheable()
General Comments 0
You need to be logged in to leave comments. Login now