##// END OF EJS Templates
lfs: introduce a user level cache for lfs files...
Matt Harbison -
r35281:8e72f915 default
parent child Browse files
Show More
@@ -1,184 +1,191 b''
1 # lfs - hash-preserving large file support using Git-LFS protocol
1 # lfs - hash-preserving large file support using Git-LFS protocol
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """lfs - large file support (EXPERIMENTAL)
8 """lfs - large file support (EXPERIMENTAL)
9
9
10 Configs::
10 Configs::
11
11
12 [lfs]
12 [lfs]
13 # Remote endpoint. Multiple protocols are supported:
13 # Remote endpoint. Multiple protocols are supported:
14 # - http(s)://user:pass@example.com/path
14 # - http(s)://user:pass@example.com/path
15 # git-lfs endpoint
15 # git-lfs endpoint
16 # - file:///tmp/path
16 # - file:///tmp/path
17 # local filesystem, usually for testing
17 # local filesystem, usually for testing
18 # if unset, lfs will prompt setting this when it must use this value.
18 # if unset, lfs will prompt setting this when it must use this value.
19 # (default: unset)
19 # (default: unset)
20 url = https://example.com/lfs
20 url = https://example.com/lfs
21
21
22 # size of a file to make it use LFS
22 # size of a file to make it use LFS
23 threshold = 10M
23 threshold = 10M
24
24
25 # how many times to retry before giving up on transferring an object
25 # how many times to retry before giving up on transferring an object
26 retry = 5
26 retry = 5
27
28 # the local directory to store lfs files for sharing across local clones.
29 # If not set, the cache is located in an OS specific cache location.
30 usercache = /path/to/global/cache
27 """
31 """
28
32
29 from __future__ import absolute_import
33 from __future__ import absolute_import
30
34
31 from mercurial.i18n import _
35 from mercurial.i18n import _
32
36
33 from mercurial import (
37 from mercurial import (
34 bundle2,
38 bundle2,
35 changegroup,
39 changegroup,
36 context,
40 context,
37 exchange,
41 exchange,
38 extensions,
42 extensions,
39 filelog,
43 filelog,
40 hg,
44 hg,
41 localrepo,
45 localrepo,
42 registrar,
46 registrar,
43 revlog,
47 revlog,
44 scmutil,
48 scmutil,
45 vfs as vfsmod,
49 vfs as vfsmod,
46 )
50 )
47
51
48 from . import (
52 from . import (
49 blobstore,
53 blobstore,
50 wrapper,
54 wrapper,
51 )
55 )
52
56
53 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
57 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
54 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
58 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
55 # be specifying the version(s) of Mercurial they are tested with, or
59 # be specifying the version(s) of Mercurial they are tested with, or
56 # leave the attribute unspecified.
60 # leave the attribute unspecified.
57 testedwith = 'ships-with-hg-core'
61 testedwith = 'ships-with-hg-core'
58
62
59 configtable = {}
63 configtable = {}
60 configitem = registrar.configitem(configtable)
64 configitem = registrar.configitem(configtable)
61
65
62 configitem('lfs', 'url',
66 configitem('lfs', 'url',
63 default=configitem.dynamicdefault,
67 default=configitem.dynamicdefault,
64 )
68 )
69 configitem('lfs', 'usercache',
70 default=None,
71 )
65 configitem('lfs', 'threshold',
72 configitem('lfs', 'threshold',
66 default=None,
73 default=None,
67 )
74 )
68 configitem('lfs', 'retry',
75 configitem('lfs', 'retry',
69 default=5,
76 default=5,
70 )
77 )
71 # Deprecated
78 # Deprecated
72 configitem('lfs', 'remotestore',
79 configitem('lfs', 'remotestore',
73 default=None,
80 default=None,
74 )
81 )
75 # Deprecated
82 # Deprecated
76 configitem('lfs', 'dummy',
83 configitem('lfs', 'dummy',
77 default=None,
84 default=None,
78 )
85 )
79 # Deprecated
86 # Deprecated
80 configitem('lfs', 'git-lfs',
87 configitem('lfs', 'git-lfs',
81 default=None,
88 default=None,
82 )
89 )
83
90
84 cmdtable = {}
91 cmdtable = {}
85 command = registrar.command(cmdtable)
92 command = registrar.command(cmdtable)
86
93
87 templatekeyword = registrar.templatekeyword()
94 templatekeyword = registrar.templatekeyword()
88
95
89 def featuresetup(ui, supported):
96 def featuresetup(ui, supported):
90 # don't die on seeing a repo with the lfs requirement
97 # don't die on seeing a repo with the lfs requirement
91 supported |= {'lfs'}
98 supported |= {'lfs'}
92
99
93 def uisetup(ui):
100 def uisetup(ui):
94 localrepo.localrepository.featuresetupfuncs.add(featuresetup)
101 localrepo.localrepository.featuresetupfuncs.add(featuresetup)
95
102
96 def reposetup(ui, repo):
103 def reposetup(ui, repo):
97 # Nothing to do with a remote repo
104 # Nothing to do with a remote repo
98 if not repo.local():
105 if not repo.local():
99 return
106 return
100
107
101 threshold = repo.ui.configbytes('lfs', 'threshold')
108 threshold = repo.ui.configbytes('lfs', 'threshold')
102
109
103 repo.svfs.options['lfsthreshold'] = threshold
110 repo.svfs.options['lfsthreshold'] = threshold
104 repo.svfs.lfslocalblobstore = blobstore.local(repo)
111 repo.svfs.lfslocalblobstore = blobstore.local(repo)
105 repo.svfs.lfsremoteblobstore = blobstore.remote(repo)
112 repo.svfs.lfsremoteblobstore = blobstore.remote(repo)
106
113
107 # Push hook
114 # Push hook
108 repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
115 repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
109
116
110 if 'lfs' not in repo.requirements:
117 if 'lfs' not in repo.requirements:
111 def checkrequireslfs(ui, repo, **kwargs):
118 def checkrequireslfs(ui, repo, **kwargs):
112 if 'lfs' not in repo.requirements:
119 if 'lfs' not in repo.requirements:
113 ctx = repo[kwargs['node']]
120 ctx = repo[kwargs['node']]
114 # TODO: is there a way to just walk the files in the commit?
121 # TODO: is there a way to just walk the files in the commit?
115 if any(ctx[f].islfs() for f in ctx.files()):
122 if any(ctx[f].islfs() for f in ctx.files()):
116 repo.requirements.add('lfs')
123 repo.requirements.add('lfs')
117 repo._writerequirements()
124 repo._writerequirements()
118
125
119 ui.setconfig('hooks', 'commit.lfs', checkrequireslfs, 'lfs')
126 ui.setconfig('hooks', 'commit.lfs', checkrequireslfs, 'lfs')
120
127
121 def wrapfilelog(filelog):
128 def wrapfilelog(filelog):
122 wrapfunction = extensions.wrapfunction
129 wrapfunction = extensions.wrapfunction
123
130
124 wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
131 wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
125 wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
132 wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
126 wrapfunction(filelog, 'size', wrapper.filelogsize)
133 wrapfunction(filelog, 'size', wrapper.filelogsize)
127
134
128 def extsetup(ui):
135 def extsetup(ui):
129 wrapfilelog(filelog.filelog)
136 wrapfilelog(filelog.filelog)
130
137
131 wrapfunction = extensions.wrapfunction
138 wrapfunction = extensions.wrapfunction
132
139
133 wrapfunction(scmutil, 'wrapconvertsink', wrapper.convertsink)
140 wrapfunction(scmutil, 'wrapconvertsink', wrapper.convertsink)
134
141
135 wrapfunction(changegroup,
142 wrapfunction(changegroup,
136 'supportedoutgoingversions',
143 'supportedoutgoingversions',
137 wrapper.supportedoutgoingversions)
144 wrapper.supportedoutgoingversions)
138 wrapfunction(changegroup,
145 wrapfunction(changegroup,
139 'allsupportedversions',
146 'allsupportedversions',
140 wrapper.allsupportedversions)
147 wrapper.allsupportedversions)
141
148
142 wrapfunction(context.basefilectx, 'cmp', wrapper.filectxcmp)
149 wrapfunction(context.basefilectx, 'cmp', wrapper.filectxcmp)
143 wrapfunction(context.basefilectx, 'isbinary', wrapper.filectxisbinary)
150 wrapfunction(context.basefilectx, 'isbinary', wrapper.filectxisbinary)
144 context.basefilectx.islfs = wrapper.filectxislfs
151 context.basefilectx.islfs = wrapper.filectxislfs
145
152
146 revlog.addflagprocessor(
153 revlog.addflagprocessor(
147 revlog.REVIDX_EXTSTORED,
154 revlog.REVIDX_EXTSTORED,
148 (
155 (
149 wrapper.readfromstore,
156 wrapper.readfromstore,
150 wrapper.writetostore,
157 wrapper.writetostore,
151 wrapper.bypasscheckhash,
158 wrapper.bypasscheckhash,
152 ),
159 ),
153 )
160 )
154
161
155 wrapfunction(hg, 'clone', wrapper.hgclone)
162 wrapfunction(hg, 'clone', wrapper.hgclone)
156 wrapfunction(hg, 'postshare', wrapper.hgpostshare)
163 wrapfunction(hg, 'postshare', wrapper.hgpostshare)
157
164
158 # Make bundle choose changegroup3 instead of changegroup2. This affects
165 # Make bundle choose changegroup3 instead of changegroup2. This affects
159 # "hg bundle" command. Note: it does not cover all bundle formats like
166 # "hg bundle" command. Note: it does not cover all bundle formats like
160 # "packed1". Using "packed1" with lfs will likely cause trouble.
167 # "packed1". Using "packed1" with lfs will likely cause trouble.
161 names = [k for k, v in exchange._bundlespeccgversions.items() if v == '02']
168 names = [k for k, v in exchange._bundlespeccgversions.items() if v == '02']
162 for k in names:
169 for k in names:
163 exchange._bundlespeccgversions[k] = '03'
170 exchange._bundlespeccgversions[k] = '03'
164
171
165 # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", we need to make sure lfs
172 # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", we need to make sure lfs
166 # options and blob stores are passed from othervfs to the new readonlyvfs.
173 # options and blob stores are passed from othervfs to the new readonlyvfs.
167 wrapfunction(vfsmod.readonlyvfs, '__init__', wrapper.vfsinit)
174 wrapfunction(vfsmod.readonlyvfs, '__init__', wrapper.vfsinit)
168
175
169 # when writing a bundle via "hg bundle" command, upload related LFS blobs
176 # when writing a bundle via "hg bundle" command, upload related LFS blobs
170 wrapfunction(bundle2, 'writenewbundle', wrapper.writenewbundle)
177 wrapfunction(bundle2, 'writenewbundle', wrapper.writenewbundle)
171
178
172 @templatekeyword('lfs_files')
179 @templatekeyword('lfs_files')
173 def lfsfiles(repo, ctx, **args):
180 def lfsfiles(repo, ctx, **args):
174 """List of strings. LFS files added or modified by the changeset."""
181 """List of strings. LFS files added or modified by the changeset."""
175 pointers = wrapper.pointersfromctx(ctx) # {path: pointer}
182 pointers = wrapper.pointersfromctx(ctx) # {path: pointer}
176 return sorted(pointers.keys())
183 return sorted(pointers.keys())
177
184
178 @command('debuglfsupload',
185 @command('debuglfsupload',
179 [('r', 'rev', [], _('upload large files introduced by REV'))])
186 [('r', 'rev', [], _('upload large files introduced by REV'))])
180 def debuglfsupload(ui, repo, **opts):
187 def debuglfsupload(ui, repo, **opts):
181 """upload lfs blobs added by the working copy parent or given revisions"""
188 """upload lfs blobs added by the working copy parent or given revisions"""
182 revs = opts.get('rev', [])
189 revs = opts.get('rev', [])
183 pointers = wrapper.extractpointers(repo, scmutil.revrange(repo, revs))
190 pointers = wrapper.extractpointers(repo, scmutil.revrange(repo, revs))
184 wrapper.uploadblobs(repo, pointers)
191 wrapper.uploadblobs(repo, pointers)
@@ -1,347 +1,358 b''
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import json
10 import json
11 import os
11 import os
12 import re
12 import re
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 from mercurial import (
16 from mercurial import (
17 error,
17 error,
18 url as urlmod,
18 url as urlmod,
19 util,
19 util,
20 vfs as vfsmod,
20 vfs as vfsmod,
21 )
21 )
22
22
23 from ..largefiles import lfutil
24
23 # 64 bytes for SHA256
25 # 64 bytes for SHA256
24 _lfsre = re.compile(r'\A[a-f0-9]{64}\Z')
26 _lfsre = re.compile(r'\A[a-f0-9]{64}\Z')
25
27
26 class lfsvfs(vfsmod.vfs):
28 class lfsvfs(vfsmod.vfs):
27 def join(self, path):
29 def join(self, path):
28 """split the path at first two characters, like: XX/XXXXX..."""
30 """split the path at first two characters, like: XX/XXXXX..."""
29 if not _lfsre.match(path):
31 if not _lfsre.match(path):
30 raise error.ProgrammingError('unexpected lfs path: %s' % path)
32 raise error.ProgrammingError('unexpected lfs path: %s' % path)
31 return super(lfsvfs, self).join(path[0:2], path[2:])
33 return super(lfsvfs, self).join(path[0:2], path[2:])
32
34
33 class filewithprogress(object):
35 class filewithprogress(object):
34 """a file-like object that supports __len__ and read.
36 """a file-like object that supports __len__ and read.
35
37
36 Useful to provide progress information for how many bytes are read.
38 Useful to provide progress information for how many bytes are read.
37 """
39 """
38
40
39 def __init__(self, fp, callback):
41 def __init__(self, fp, callback):
40 self._fp = fp
42 self._fp = fp
41 self._callback = callback # func(readsize)
43 self._callback = callback # func(readsize)
42 fp.seek(0, os.SEEK_END)
44 fp.seek(0, os.SEEK_END)
43 self._len = fp.tell()
45 self._len = fp.tell()
44 fp.seek(0)
46 fp.seek(0)
45
47
46 def __len__(self):
48 def __len__(self):
47 return self._len
49 return self._len
48
50
49 def read(self, size):
51 def read(self, size):
50 if self._fp is None:
52 if self._fp is None:
51 return b''
53 return b''
52 data = self._fp.read(size)
54 data = self._fp.read(size)
53 if data:
55 if data:
54 if self._callback:
56 if self._callback:
55 self._callback(len(data))
57 self._callback(len(data))
56 else:
58 else:
57 self._fp.close()
59 self._fp.close()
58 self._fp = None
60 self._fp = None
59 return data
61 return data
60
62
61 class local(object):
63 class local(object):
62 """Local blobstore for large file contents.
64 """Local blobstore for large file contents.
63
65
64 This blobstore is used both as a cache and as a staging area for large blobs
66 This blobstore is used both as a cache and as a staging area for large blobs
65 to be uploaded to the remote blobstore.
67 to be uploaded to the remote blobstore.
66 """
68 """
67
69
68 def __init__(self, repo):
70 def __init__(self, repo):
69 fullpath = repo.svfs.join('lfs/objects')
71 fullpath = repo.svfs.join('lfs/objects')
70 self.vfs = lfsvfs(fullpath)
72 self.vfs = lfsvfs(fullpath)
73 usercache = lfutil._usercachedir(repo.ui, 'lfs')
74 self.cachevfs = lfsvfs(usercache)
71
75
72 def write(self, oid, data):
76 def write(self, oid, data):
73 """Write blob to local blobstore."""
77 """Write blob to local blobstore."""
74 with self.vfs(oid, 'wb', atomictemp=True) as fp:
78 with self.vfs(oid, 'wb', atomictemp=True) as fp:
75 fp.write(data)
79 fp.write(data)
76
80
81 # XXX: should we verify the content of the cache, and hardlink back to
82 # the local store on success, but truncate, write and link on failure?
83 if not self.cachevfs.exists(oid):
84 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
85
77 def read(self, oid):
86 def read(self, oid):
78 """Read blob from local blobstore."""
87 """Read blob from local blobstore."""
88 if not self.vfs.exists(oid):
89 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
79 return self.vfs.read(oid)
90 return self.vfs.read(oid)
80
91
81 def has(self, oid):
92 def has(self, oid):
82 """Returns True if the local blobstore contains the requested blob,
93 """Returns True if the local blobstore contains the requested blob,
83 False otherwise."""
94 False otherwise."""
84 return self.vfs.exists(oid)
95 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
85
96
86 class _gitlfsremote(object):
97 class _gitlfsremote(object):
87
98
88 def __init__(self, repo, url):
99 def __init__(self, repo, url):
89 ui = repo.ui
100 ui = repo.ui
90 self.ui = ui
101 self.ui = ui
91 baseurl, authinfo = url.authinfo()
102 baseurl, authinfo = url.authinfo()
92 self.baseurl = baseurl.rstrip('/')
103 self.baseurl = baseurl.rstrip('/')
93 self.urlopener = urlmod.opener(ui, authinfo)
104 self.urlopener = urlmod.opener(ui, authinfo)
94 self.retry = ui.configint('lfs', 'retry')
105 self.retry = ui.configint('lfs', 'retry')
95
106
96 def writebatch(self, pointers, fromstore):
107 def writebatch(self, pointers, fromstore):
97 """Batch upload from local to remote blobstore."""
108 """Batch upload from local to remote blobstore."""
98 self._batch(pointers, fromstore, 'upload')
109 self._batch(pointers, fromstore, 'upload')
99
110
100 def readbatch(self, pointers, tostore):
111 def readbatch(self, pointers, tostore):
101 """Batch download from remote to local blostore."""
112 """Batch download from remote to local blostore."""
102 self._batch(pointers, tostore, 'download')
113 self._batch(pointers, tostore, 'download')
103
114
104 def _batchrequest(self, pointers, action):
115 def _batchrequest(self, pointers, action):
105 """Get metadata about objects pointed by pointers for given action
116 """Get metadata about objects pointed by pointers for given action
106
117
107 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
118 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
108 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
119 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
109 """
120 """
110 objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
121 objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
111 requestdata = json.dumps({
122 requestdata = json.dumps({
112 'objects': objects,
123 'objects': objects,
113 'operation': action,
124 'operation': action,
114 })
125 })
115 batchreq = util.urlreq.request('%s/objects/batch' % self.baseurl,
126 batchreq = util.urlreq.request('%s/objects/batch' % self.baseurl,
116 data=requestdata)
127 data=requestdata)
117 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
128 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
118 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
129 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
119 try:
130 try:
120 rawjson = self.urlopener.open(batchreq).read()
131 rawjson = self.urlopener.open(batchreq).read()
121 except util.urlerr.httperror as ex:
132 except util.urlerr.httperror as ex:
122 raise LfsRemoteError(_('LFS HTTP error: %s (action=%s)')
133 raise LfsRemoteError(_('LFS HTTP error: %s (action=%s)')
123 % (ex, action))
134 % (ex, action))
124 try:
135 try:
125 response = json.loads(rawjson)
136 response = json.loads(rawjson)
126 except ValueError:
137 except ValueError:
127 raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
138 raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
128 % rawjson)
139 % rawjson)
129 return response
140 return response
130
141
131 def _checkforservererror(self, pointers, responses):
142 def _checkforservererror(self, pointers, responses):
132 """Scans errors from objects
143 """Scans errors from objects
133
144
134 Returns LfsRemoteError if any objects has an error"""
145 Returns LfsRemoteError if any objects has an error"""
135 for response in responses:
146 for response in responses:
136 error = response.get('error')
147 error = response.get('error')
137 if error:
148 if error:
138 ptrmap = {p.oid(): p for p in pointers}
149 ptrmap = {p.oid(): p for p in pointers}
139 p = ptrmap.get(response['oid'], None)
150 p = ptrmap.get(response['oid'], None)
140 if error['code'] == 404 and p:
151 if error['code'] == 404 and p:
141 filename = getattr(p, 'filename', 'unknown')
152 filename = getattr(p, 'filename', 'unknown')
142 raise LfsRemoteError(
153 raise LfsRemoteError(
143 _(('LFS server error. Remote object '
154 _(('LFS server error. Remote object '
144 'for file %s not found: %r')) % (filename, response))
155 'for file %s not found: %r')) % (filename, response))
145 raise LfsRemoteError(_('LFS server error: %r') % response)
156 raise LfsRemoteError(_('LFS server error: %r') % response)
146
157
147 def _extractobjects(self, response, pointers, action):
158 def _extractobjects(self, response, pointers, action):
148 """extract objects from response of the batch API
159 """extract objects from response of the batch API
149
160
150 response: parsed JSON object returned by batch API
161 response: parsed JSON object returned by batch API
151 return response['objects'] filtered by action
162 return response['objects'] filtered by action
152 raise if any object has an error
163 raise if any object has an error
153 """
164 """
154 # Scan errors from objects - fail early
165 # Scan errors from objects - fail early
155 objects = response.get('objects', [])
166 objects = response.get('objects', [])
156 self._checkforservererror(pointers, objects)
167 self._checkforservererror(pointers, objects)
157
168
158 # Filter objects with given action. Practically, this skips uploading
169 # Filter objects with given action. Practically, this skips uploading
159 # objects which exist in the server.
170 # objects which exist in the server.
160 filteredobjects = [o for o in objects if action in o.get('actions', [])]
171 filteredobjects = [o for o in objects if action in o.get('actions', [])]
161 # But for downloading, we want all objects. Therefore missing objects
172 # But for downloading, we want all objects. Therefore missing objects
162 # should be considered an error.
173 # should be considered an error.
163 if action == 'download':
174 if action == 'download':
164 if len(filteredobjects) < len(objects):
175 if len(filteredobjects) < len(objects):
165 missing = [o.get('oid', '?')
176 missing = [o.get('oid', '?')
166 for o in objects
177 for o in objects
167 if action not in o.get('actions', [])]
178 if action not in o.get('actions', [])]
168 raise LfsRemoteError(
179 raise LfsRemoteError(
169 _('LFS server claims required objects do not exist:\n%s')
180 _('LFS server claims required objects do not exist:\n%s')
170 % '\n'.join(missing))
181 % '\n'.join(missing))
171
182
172 return filteredobjects
183 return filteredobjects
173
184
174 def _basictransfer(self, obj, action, localstore, progress=None):
185 def _basictransfer(self, obj, action, localstore, progress=None):
175 """Download or upload a single object using basic transfer protocol
186 """Download or upload a single object using basic transfer protocol
176
187
177 obj: dict, an object description returned by batch API
188 obj: dict, an object description returned by batch API
178 action: string, one of ['upload', 'download']
189 action: string, one of ['upload', 'download']
179 localstore: blobstore.local
190 localstore: blobstore.local
180
191
181 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
192 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
182 basic-transfers.md
193 basic-transfers.md
183 """
194 """
184 oid = str(obj['oid'])
195 oid = str(obj['oid'])
185
196
186 href = str(obj['actions'][action].get('href'))
197 href = str(obj['actions'][action].get('href'))
187 headers = obj['actions'][action].get('header', {}).items()
198 headers = obj['actions'][action].get('header', {}).items()
188
199
189 request = util.urlreq.request(href)
200 request = util.urlreq.request(href)
190 if action == 'upload':
201 if action == 'upload':
191 # If uploading blobs, read data from local blobstore.
202 # If uploading blobs, read data from local blobstore.
192 request.data = filewithprogress(localstore.vfs(oid), progress)
203 request.data = filewithprogress(localstore.vfs(oid), progress)
193 request.get_method = lambda: 'PUT'
204 request.get_method = lambda: 'PUT'
194
205
195 for k, v in headers:
206 for k, v in headers:
196 request.add_header(k, v)
207 request.add_header(k, v)
197
208
198 response = b''
209 response = b''
199 try:
210 try:
200 req = self.urlopener.open(request)
211 req = self.urlopener.open(request)
201 while True:
212 while True:
202 data = req.read(1048576)
213 data = req.read(1048576)
203 if not data:
214 if not data:
204 break
215 break
205 if action == 'download' and progress:
216 if action == 'download' and progress:
206 progress(len(data))
217 progress(len(data))
207 response += data
218 response += data
208 except util.urlerr.httperror as ex:
219 except util.urlerr.httperror as ex:
209 raise LfsRemoteError(_('HTTP error: %s (oid=%s, action=%s)')
220 raise LfsRemoteError(_('HTTP error: %s (oid=%s, action=%s)')
210 % (ex, oid, action))
221 % (ex, oid, action))
211
222
212 if action == 'download':
223 if action == 'download':
213 # If downloading blobs, store downloaded data to local blobstore
224 # If downloading blobs, store downloaded data to local blobstore
214 localstore.write(oid, response)
225 localstore.write(oid, response)
215
226
216 def _batch(self, pointers, localstore, action):
227 def _batch(self, pointers, localstore, action):
217 if action not in ['upload', 'download']:
228 if action not in ['upload', 'download']:
218 raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
229 raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
219
230
220 response = self._batchrequest(pointers, action)
231 response = self._batchrequest(pointers, action)
221 prunningsize = [0]
232 prunningsize = [0]
222 objects = self._extractobjects(response, pointers, action)
233 objects = self._extractobjects(response, pointers, action)
223 total = sum(x.get('size', 0) for x in objects)
234 total = sum(x.get('size', 0) for x in objects)
224 topic = {'upload': _('lfs uploading'),
235 topic = {'upload': _('lfs uploading'),
225 'download': _('lfs downloading')}[action]
236 'download': _('lfs downloading')}[action]
226 if self.ui.verbose and len(objects) > 1:
237 if self.ui.verbose and len(objects) > 1:
227 self.ui.write(_('lfs: need to transfer %d objects (%s)\n')
238 self.ui.write(_('lfs: need to transfer %d objects (%s)\n')
228 % (len(objects), util.bytecount(total)))
239 % (len(objects), util.bytecount(total)))
229 self.ui.progress(topic, 0, total=total)
240 self.ui.progress(topic, 0, total=total)
230 def progress(size):
241 def progress(size):
231 # advance progress bar by "size" bytes
242 # advance progress bar by "size" bytes
232 prunningsize[0] += size
243 prunningsize[0] += size
233 self.ui.progress(topic, prunningsize[0], total=total)
244 self.ui.progress(topic, prunningsize[0], total=total)
234 for obj in sorted(objects, key=lambda o: o.get('oid')):
245 for obj in sorted(objects, key=lambda o: o.get('oid')):
235 objsize = obj.get('size', 0)
246 objsize = obj.get('size', 0)
236 if self.ui.verbose:
247 if self.ui.verbose:
237 if action == 'download':
248 if action == 'download':
238 msg = _('lfs: downloading %s (%s)\n')
249 msg = _('lfs: downloading %s (%s)\n')
239 elif action == 'upload':
250 elif action == 'upload':
240 msg = _('lfs: uploading %s (%s)\n')
251 msg = _('lfs: uploading %s (%s)\n')
241 self.ui.write(msg % (obj.get('oid'), util.bytecount(objsize)))
252 self.ui.write(msg % (obj.get('oid'), util.bytecount(objsize)))
242 origrunningsize = prunningsize[0]
253 origrunningsize = prunningsize[0]
243 retry = self.retry
254 retry = self.retry
244 while True:
255 while True:
245 prunningsize[0] = origrunningsize
256 prunningsize[0] = origrunningsize
246 try:
257 try:
247 self._basictransfer(obj, action, localstore,
258 self._basictransfer(obj, action, localstore,
248 progress=progress)
259 progress=progress)
249 break
260 break
250 except Exception as ex:
261 except Exception as ex:
251 if retry > 0:
262 if retry > 0:
252 if self.ui.verbose:
263 if self.ui.verbose:
253 self.ui.write(
264 self.ui.write(
254 _('lfs: failed: %r (remaining retry %d)\n')
265 _('lfs: failed: %r (remaining retry %d)\n')
255 % (ex, retry))
266 % (ex, retry))
256 retry -= 1
267 retry -= 1
257 continue
268 continue
258 raise
269 raise
259
270
260 self.ui.progress(topic, pos=None, total=total)
271 self.ui.progress(topic, pos=None, total=total)
261
272
262 def __del__(self):
273 def __del__(self):
263 # copied from mercurial/httppeer.py
274 # copied from mercurial/httppeer.py
264 urlopener = getattr(self, 'urlopener', None)
275 urlopener = getattr(self, 'urlopener', None)
265 if urlopener:
276 if urlopener:
266 for h in urlopener.handlers:
277 for h in urlopener.handlers:
267 h.close()
278 h.close()
268 getattr(h, "close_all", lambda : None)()
279 getattr(h, "close_all", lambda : None)()
269
280
270 class _dummyremote(object):
281 class _dummyremote(object):
271 """Dummy store storing blobs to temp directory."""
282 """Dummy store storing blobs to temp directory."""
272
283
273 def __init__(self, repo, url):
284 def __init__(self, repo, url):
274 fullpath = repo.vfs.join('lfs', url.path)
285 fullpath = repo.vfs.join('lfs', url.path)
275 self.vfs = lfsvfs(fullpath)
286 self.vfs = lfsvfs(fullpath)
276
287
277 def writebatch(self, pointers, fromstore):
288 def writebatch(self, pointers, fromstore):
278 for p in pointers:
289 for p in pointers:
279 content = fromstore.read(p.oid())
290 content = fromstore.read(p.oid())
280 with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
291 with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
281 fp.write(content)
292 fp.write(content)
282
293
283 def readbatch(self, pointers, tostore):
294 def readbatch(self, pointers, tostore):
284 for p in pointers:
295 for p in pointers:
285 content = self.vfs.read(p.oid())
296 content = self.vfs.read(p.oid())
286 tostore.write(p.oid(), content)
297 tostore.write(p.oid(), content)
287
298
288 class _nullremote(object):
299 class _nullremote(object):
289 """Null store storing blobs to /dev/null."""
300 """Null store storing blobs to /dev/null."""
290
301
291 def __init__(self, repo, url):
302 def __init__(self, repo, url):
292 pass
303 pass
293
304
294 def writebatch(self, pointers, fromstore):
305 def writebatch(self, pointers, fromstore):
295 pass
306 pass
296
307
297 def readbatch(self, pointers, tostore):
308 def readbatch(self, pointers, tostore):
298 pass
309 pass
299
310
300 class _promptremote(object):
311 class _promptremote(object):
301 """Prompt user to set lfs.url when accessed."""
312 """Prompt user to set lfs.url when accessed."""
302
313
303 def __init__(self, repo, url):
314 def __init__(self, repo, url):
304 pass
315 pass
305
316
306 def writebatch(self, pointers, fromstore, ui=None):
317 def writebatch(self, pointers, fromstore, ui=None):
307 self._prompt()
318 self._prompt()
308
319
309 def readbatch(self, pointers, tostore, ui=None):
320 def readbatch(self, pointers, tostore, ui=None):
310 self._prompt()
321 self._prompt()
311
322
312 def _prompt(self):
323 def _prompt(self):
313 raise error.Abort(_('lfs.url needs to be configured'))
324 raise error.Abort(_('lfs.url needs to be configured'))
314
325
315 _storemap = {
326 _storemap = {
316 'https': _gitlfsremote,
327 'https': _gitlfsremote,
317 'http': _gitlfsremote,
328 'http': _gitlfsremote,
318 'file': _dummyremote,
329 'file': _dummyremote,
319 'null': _nullremote,
330 'null': _nullremote,
320 None: _promptremote,
331 None: _promptremote,
321 }
332 }
322
333
323 def remote(repo):
334 def remote(repo):
324 """remotestore factory. return a store in _storemap depending on config"""
335 """remotestore factory. return a store in _storemap depending on config"""
325 defaulturl = ''
336 defaulturl = ''
326
337
327 # convert deprecated configs to the new url. TODO: remove this if other
338 # convert deprecated configs to the new url. TODO: remove this if other
328 # places are migrated to the new url config.
339 # places are migrated to the new url config.
329 # deprecated config: lfs.remotestore
340 # deprecated config: lfs.remotestore
330 deprecatedstore = repo.ui.config('lfs', 'remotestore')
341 deprecatedstore = repo.ui.config('lfs', 'remotestore')
331 if deprecatedstore == 'dummy':
342 if deprecatedstore == 'dummy':
332 # deprecated config: lfs.remotepath
343 # deprecated config: lfs.remotepath
333 defaulturl = 'file://' + repo.ui.config('lfs', 'remotepath')
344 defaulturl = 'file://' + repo.ui.config('lfs', 'remotepath')
334 elif deprecatedstore == 'git-lfs':
345 elif deprecatedstore == 'git-lfs':
335 # deprecated config: lfs.remoteurl
346 # deprecated config: lfs.remoteurl
336 defaulturl = repo.ui.config('lfs', 'remoteurl')
347 defaulturl = repo.ui.config('lfs', 'remoteurl')
337 elif deprecatedstore == 'null':
348 elif deprecatedstore == 'null':
338 defaulturl = 'null://'
349 defaulturl = 'null://'
339
350
340 url = util.url(repo.ui.config('lfs', 'url', defaulturl))
351 url = util.url(repo.ui.config('lfs', 'url', defaulturl))
341 scheme = url.scheme
352 scheme = url.scheme
342 if scheme not in _storemap:
353 if scheme not in _storemap:
343 raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
354 raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
344 return _storemap[scheme](repo, url)
355 return _storemap[scheme](repo, url)
345
356
346 class LfsRemoteError(error.RevlogError):
357 class LfsRemoteError(error.RevlogError):
347 pass
358 pass
@@ -1,3014 +1,3017 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 # 10) parallel, pure, tests that call run-tests:
38 # 10) parallel, pure, tests that call run-tests:
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 #
40 #
41 # (You could use any subset of the tests: test-s* happens to match
41 # (You could use any subset of the tests: test-s* happens to match
42 # enough that it's worth doing parallel runs, few enough that it
42 # enough that it's worth doing parallel runs, few enough that it
43 # completes fairly quickly, includes both shell and Python scripts, and
43 # completes fairly quickly, includes both shell and Python scripts, and
44 # includes some scripts that run daemon processes.)
44 # includes some scripts that run daemon processes.)
45
45
46 from __future__ import absolute_import, print_function
46 from __future__ import absolute_import, print_function
47
47
48 import argparse
48 import argparse
49 import collections
49 import collections
50 import difflib
50 import difflib
51 import distutils.version as version
51 import distutils.version as version
52 import errno
52 import errno
53 import json
53 import json
54 import os
54 import os
55 import random
55 import random
56 import re
56 import re
57 import shutil
57 import shutil
58 import signal
58 import signal
59 import socket
59 import socket
60 import subprocess
60 import subprocess
61 import sys
61 import sys
62 import sysconfig
62 import sysconfig
63 import tempfile
63 import tempfile
64 import threading
64 import threading
65 import time
65 import time
66 import unittest
66 import unittest
67 import xml.dom.minidom as minidom
67 import xml.dom.minidom as minidom
68
68
69 try:
69 try:
70 import Queue as queue
70 import Queue as queue
71 except ImportError:
71 except ImportError:
72 import queue
72 import queue
73
73
74 try:
74 try:
75 import shlex
75 import shlex
76 shellquote = shlex.quote
76 shellquote = shlex.quote
77 except (ImportError, AttributeError):
77 except (ImportError, AttributeError):
78 import pipes
78 import pipes
79 shellquote = pipes.quote
79 shellquote = pipes.quote
80
80
81 if os.environ.get('RTUNICODEPEDANTRY', False):
81 if os.environ.get('RTUNICODEPEDANTRY', False):
82 try:
82 try:
83 reload(sys)
83 reload(sys)
84 sys.setdefaultencoding("undefined")
84 sys.setdefaultencoding("undefined")
85 except NameError:
85 except NameError:
86 pass
86 pass
87
87
88 origenviron = os.environ.copy()
88 origenviron = os.environ.copy()
89 osenvironb = getattr(os, 'environb', os.environ)
89 osenvironb = getattr(os, 'environb', os.environ)
90 processlock = threading.Lock()
90 processlock = threading.Lock()
91
91
92 pygmentspresent = False
92 pygmentspresent = False
93 # ANSI color is unsupported prior to Windows 10
93 # ANSI color is unsupported prior to Windows 10
94 if os.name != 'nt':
94 if os.name != 'nt':
95 try: # is pygments installed
95 try: # is pygments installed
96 import pygments
96 import pygments
97 import pygments.lexers as lexers
97 import pygments.lexers as lexers
98 import pygments.lexer as lexer
98 import pygments.lexer as lexer
99 import pygments.formatters as formatters
99 import pygments.formatters as formatters
100 import pygments.token as token
100 import pygments.token as token
101 import pygments.style as style
101 import pygments.style as style
102 pygmentspresent = True
102 pygmentspresent = True
103 difflexer = lexers.DiffLexer()
103 difflexer = lexers.DiffLexer()
104 terminal256formatter = formatters.Terminal256Formatter()
104 terminal256formatter = formatters.Terminal256Formatter()
105 except ImportError:
105 except ImportError:
106 pass
106 pass
107
107
108 if pygmentspresent:
108 if pygmentspresent:
109 class TestRunnerStyle(style.Style):
109 class TestRunnerStyle(style.Style):
110 default_style = ""
110 default_style = ""
111 skipped = token.string_to_tokentype("Token.Generic.Skipped")
111 skipped = token.string_to_tokentype("Token.Generic.Skipped")
112 failed = token.string_to_tokentype("Token.Generic.Failed")
112 failed = token.string_to_tokentype("Token.Generic.Failed")
113 skippedname = token.string_to_tokentype("Token.Generic.SName")
113 skippedname = token.string_to_tokentype("Token.Generic.SName")
114 failedname = token.string_to_tokentype("Token.Generic.FName")
114 failedname = token.string_to_tokentype("Token.Generic.FName")
115 styles = {
115 styles = {
116 skipped: '#e5e5e5',
116 skipped: '#e5e5e5',
117 skippedname: '#00ffff',
117 skippedname: '#00ffff',
118 failed: '#7f0000',
118 failed: '#7f0000',
119 failedname: '#ff0000',
119 failedname: '#ff0000',
120 }
120 }
121
121
122 class TestRunnerLexer(lexer.RegexLexer):
122 class TestRunnerLexer(lexer.RegexLexer):
123 tokens = {
123 tokens = {
124 'root': [
124 'root': [
125 (r'^Skipped', token.Generic.Skipped, 'skipped'),
125 (r'^Skipped', token.Generic.Skipped, 'skipped'),
126 (r'^Failed ', token.Generic.Failed, 'failed'),
126 (r'^Failed ', token.Generic.Failed, 'failed'),
127 (r'^ERROR: ', token.Generic.Failed, 'failed'),
127 (r'^ERROR: ', token.Generic.Failed, 'failed'),
128 ],
128 ],
129 'skipped': [
129 'skipped': [
130 (r'[\w-]+\.(t|py)', token.Generic.SName),
130 (r'[\w-]+\.(t|py)', token.Generic.SName),
131 (r':.*', token.Generic.Skipped),
131 (r':.*', token.Generic.Skipped),
132 ],
132 ],
133 'failed': [
133 'failed': [
134 (r'[\w-]+\.(t|py)', token.Generic.FName),
134 (r'[\w-]+\.(t|py)', token.Generic.FName),
135 (r'(:| ).*', token.Generic.Failed),
135 (r'(:| ).*', token.Generic.Failed),
136 ]
136 ]
137 }
137 }
138
138
139 runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
139 runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
140 runnerlexer = TestRunnerLexer()
140 runnerlexer = TestRunnerLexer()
141
141
142 if sys.version_info > (3, 5, 0):
142 if sys.version_info > (3, 5, 0):
143 PYTHON3 = True
143 PYTHON3 = True
144 xrange = range # we use xrange in one place, and we'd rather not use range
144 xrange = range # we use xrange in one place, and we'd rather not use range
145 def _bytespath(p):
145 def _bytespath(p):
146 if p is None:
146 if p is None:
147 return p
147 return p
148 return p.encode('utf-8')
148 return p.encode('utf-8')
149
149
150 def _strpath(p):
150 def _strpath(p):
151 if p is None:
151 if p is None:
152 return p
152 return p
153 return p.decode('utf-8')
153 return p.decode('utf-8')
154
154
155 elif sys.version_info >= (3, 0, 0):
155 elif sys.version_info >= (3, 0, 0):
156 print('%s is only supported on Python 3.5+ and 2.7, not %s' %
156 print('%s is only supported on Python 3.5+ and 2.7, not %s' %
157 (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
157 (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
158 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
158 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
159 else:
159 else:
160 PYTHON3 = False
160 PYTHON3 = False
161
161
162 # In python 2.x, path operations are generally done using
162 # In python 2.x, path operations are generally done using
163 # bytestrings by default, so we don't have to do any extra
163 # bytestrings by default, so we don't have to do any extra
164 # fiddling there. We define the wrapper functions anyway just to
164 # fiddling there. We define the wrapper functions anyway just to
165 # help keep code consistent between platforms.
165 # help keep code consistent between platforms.
166 def _bytespath(p):
166 def _bytespath(p):
167 return p
167 return p
168
168
169 _strpath = _bytespath
169 _strpath = _bytespath
170
170
171 # For Windows support
171 # For Windows support
172 wifexited = getattr(os, "WIFEXITED", lambda x: False)
172 wifexited = getattr(os, "WIFEXITED", lambda x: False)
173
173
174 # Whether to use IPv6
174 # Whether to use IPv6
175 def checksocketfamily(name, port=20058):
175 def checksocketfamily(name, port=20058):
176 """return true if we can listen on localhost using family=name
176 """return true if we can listen on localhost using family=name
177
177
178 name should be either 'AF_INET', or 'AF_INET6'.
178 name should be either 'AF_INET', or 'AF_INET6'.
179 port being used is okay - EADDRINUSE is considered as successful.
179 port being used is okay - EADDRINUSE is considered as successful.
180 """
180 """
181 family = getattr(socket, name, None)
181 family = getattr(socket, name, None)
182 if family is None:
182 if family is None:
183 return False
183 return False
184 try:
184 try:
185 s = socket.socket(family, socket.SOCK_STREAM)
185 s = socket.socket(family, socket.SOCK_STREAM)
186 s.bind(('localhost', port))
186 s.bind(('localhost', port))
187 s.close()
187 s.close()
188 return True
188 return True
189 except socket.error as exc:
189 except socket.error as exc:
190 if exc.errno == errno.EADDRINUSE:
190 if exc.errno == errno.EADDRINUSE:
191 return True
191 return True
192 elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
192 elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
193 return False
193 return False
194 else:
194 else:
195 raise
195 raise
196 else:
196 else:
197 return False
197 return False
198
198
199 # useipv6 will be set by parseargs
199 # useipv6 will be set by parseargs
200 useipv6 = None
200 useipv6 = None
201
201
202 def checkportisavailable(port):
202 def checkportisavailable(port):
203 """return true if a port seems free to bind on localhost"""
203 """return true if a port seems free to bind on localhost"""
204 if useipv6:
204 if useipv6:
205 family = socket.AF_INET6
205 family = socket.AF_INET6
206 else:
206 else:
207 family = socket.AF_INET
207 family = socket.AF_INET
208 try:
208 try:
209 s = socket.socket(family, socket.SOCK_STREAM)
209 s = socket.socket(family, socket.SOCK_STREAM)
210 s.bind(('localhost', port))
210 s.bind(('localhost', port))
211 s.close()
211 s.close()
212 return True
212 return True
213 except socket.error as exc:
213 except socket.error as exc:
214 if exc.errno not in (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
214 if exc.errno not in (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
215 errno.EPROTONOSUPPORT):
215 errno.EPROTONOSUPPORT):
216 raise
216 raise
217 return False
217 return False
218
218
219 closefds = os.name == 'posix'
219 closefds = os.name == 'posix'
220 def Popen4(cmd, wd, timeout, env=None):
220 def Popen4(cmd, wd, timeout, env=None):
221 processlock.acquire()
221 processlock.acquire()
222 p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
222 p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
223 close_fds=closefds,
223 close_fds=closefds,
224 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
224 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
225 stderr=subprocess.STDOUT)
225 stderr=subprocess.STDOUT)
226 processlock.release()
226 processlock.release()
227
227
228 p.fromchild = p.stdout
228 p.fromchild = p.stdout
229 p.tochild = p.stdin
229 p.tochild = p.stdin
230 p.childerr = p.stderr
230 p.childerr = p.stderr
231
231
232 p.timeout = False
232 p.timeout = False
233 if timeout:
233 if timeout:
234 def t():
234 def t():
235 start = time.time()
235 start = time.time()
236 while time.time() - start < timeout and p.returncode is None:
236 while time.time() - start < timeout and p.returncode is None:
237 time.sleep(.1)
237 time.sleep(.1)
238 p.timeout = True
238 p.timeout = True
239 if p.returncode is None:
239 if p.returncode is None:
240 terminate(p)
240 terminate(p)
241 threading.Thread(target=t).start()
241 threading.Thread(target=t).start()
242
242
243 return p
243 return p
244
244
245 PYTHON = _bytespath(sys.executable.replace('\\', '/'))
245 PYTHON = _bytespath(sys.executable.replace('\\', '/'))
246 IMPL_PATH = b'PYTHONPATH'
246 IMPL_PATH = b'PYTHONPATH'
247 if 'java' in sys.platform:
247 if 'java' in sys.platform:
248 IMPL_PATH = b'JYTHONPATH'
248 IMPL_PATH = b'JYTHONPATH'
249
249
250 defaults = {
250 defaults = {
251 'jobs': ('HGTEST_JOBS', 1),
251 'jobs': ('HGTEST_JOBS', 1),
252 'timeout': ('HGTEST_TIMEOUT', 180),
252 'timeout': ('HGTEST_TIMEOUT', 180),
253 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500),
253 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500),
254 'port': ('HGTEST_PORT', 20059),
254 'port': ('HGTEST_PORT', 20059),
255 'shell': ('HGTEST_SHELL', 'sh'),
255 'shell': ('HGTEST_SHELL', 'sh'),
256 }
256 }
257
257
258 def canonpath(path):
258 def canonpath(path):
259 return os.path.realpath(os.path.expanduser(path))
259 return os.path.realpath(os.path.expanduser(path))
260
260
261 def parselistfiles(files, listtype, warn=True):
261 def parselistfiles(files, listtype, warn=True):
262 entries = dict()
262 entries = dict()
263 for filename in files:
263 for filename in files:
264 try:
264 try:
265 path = os.path.expanduser(os.path.expandvars(filename))
265 path = os.path.expanduser(os.path.expandvars(filename))
266 f = open(path, "rb")
266 f = open(path, "rb")
267 except IOError as err:
267 except IOError as err:
268 if err.errno != errno.ENOENT:
268 if err.errno != errno.ENOENT:
269 raise
269 raise
270 if warn:
270 if warn:
271 print("warning: no such %s file: %s" % (listtype, filename))
271 print("warning: no such %s file: %s" % (listtype, filename))
272 continue
272 continue
273
273
274 for line in f.readlines():
274 for line in f.readlines():
275 line = line.split(b'#', 1)[0].strip()
275 line = line.split(b'#', 1)[0].strip()
276 if line:
276 if line:
277 entries[line] = filename
277 entries[line] = filename
278
278
279 f.close()
279 f.close()
280 return entries
280 return entries
281
281
282 def parsettestcases(path):
282 def parsettestcases(path):
283 """read a .t test file, return a set of test case names
283 """read a .t test file, return a set of test case names
284
284
285 If path does not exist, return an empty set.
285 If path does not exist, return an empty set.
286 """
286 """
287 cases = set()
287 cases = set()
288 try:
288 try:
289 with open(path, 'rb') as f:
289 with open(path, 'rb') as f:
290 for l in f:
290 for l in f:
291 if l.startswith(b'#testcases '):
291 if l.startswith(b'#testcases '):
292 cases.update(l[11:].split())
292 cases.update(l[11:].split())
293 except IOError as ex:
293 except IOError as ex:
294 if ex.errno != errno.ENOENT:
294 if ex.errno != errno.ENOENT:
295 raise
295 raise
296 return cases
296 return cases
297
297
298 def getparser():
298 def getparser():
299 """Obtain the OptionParser used by the CLI."""
299 """Obtain the OptionParser used by the CLI."""
300 parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')
300 parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')
301
301
302 selection = parser.add_argument_group('Test Selection')
302 selection = parser.add_argument_group('Test Selection')
303 selection.add_argument('--allow-slow-tests', action='store_true',
303 selection.add_argument('--allow-slow-tests', action='store_true',
304 help='allow extremely slow tests')
304 help='allow extremely slow tests')
305 selection.add_argument("--blacklist", action="append",
305 selection.add_argument("--blacklist", action="append",
306 help="skip tests listed in the specified blacklist file")
306 help="skip tests listed in the specified blacklist file")
307 selection.add_argument("--changed",
307 selection.add_argument("--changed",
308 help="run tests that are changed in parent rev or working directory")
308 help="run tests that are changed in parent rev or working directory")
309 selection.add_argument("-k", "--keywords",
309 selection.add_argument("-k", "--keywords",
310 help="run tests matching keywords")
310 help="run tests matching keywords")
311 selection.add_argument("-r", "--retest", action="store_true",
311 selection.add_argument("-r", "--retest", action="store_true",
312 help = "retest failed tests")
312 help = "retest failed tests")
313 selection.add_argument("--test-list", action="append",
313 selection.add_argument("--test-list", action="append",
314 help="read tests to run from the specified file")
314 help="read tests to run from the specified file")
315 selection.add_argument("--whitelist", action="append",
315 selection.add_argument("--whitelist", action="append",
316 help="always run tests listed in the specified whitelist file")
316 help="always run tests listed in the specified whitelist file")
317 selection.add_argument('tests', metavar='TESTS', nargs='*',
317 selection.add_argument('tests', metavar='TESTS', nargs='*',
318 help='Tests to run')
318 help='Tests to run')
319
319
320 harness = parser.add_argument_group('Test Harness Behavior')
320 harness = parser.add_argument_group('Test Harness Behavior')
321 harness.add_argument('--bisect-repo',
321 harness.add_argument('--bisect-repo',
322 metavar='bisect_repo',
322 metavar='bisect_repo',
323 help=("Path of a repo to bisect. Use together with "
323 help=("Path of a repo to bisect. Use together with "
324 "--known-good-rev"))
324 "--known-good-rev"))
325 harness.add_argument("-d", "--debug", action="store_true",
325 harness.add_argument("-d", "--debug", action="store_true",
326 help="debug mode: write output of test scripts to console"
326 help="debug mode: write output of test scripts to console"
327 " rather than capturing and diffing it (disables timeout)")
327 " rather than capturing and diffing it (disables timeout)")
328 harness.add_argument("-f", "--first", action="store_true",
328 harness.add_argument("-f", "--first", action="store_true",
329 help="exit on the first test failure")
329 help="exit on the first test failure")
330 harness.add_argument("-i", "--interactive", action="store_true",
330 harness.add_argument("-i", "--interactive", action="store_true",
331 help="prompt to accept changed output")
331 help="prompt to accept changed output")
332 harness.add_argument("-j", "--jobs", type=int,
332 harness.add_argument("-j", "--jobs", type=int,
333 help="number of jobs to run in parallel"
333 help="number of jobs to run in parallel"
334 " (default: $%s or %d)" % defaults['jobs'])
334 " (default: $%s or %d)" % defaults['jobs'])
335 harness.add_argument("--keep-tmpdir", action="store_true",
335 harness.add_argument("--keep-tmpdir", action="store_true",
336 help="keep temporary directory after running tests")
336 help="keep temporary directory after running tests")
337 harness.add_argument('--known-good-rev',
337 harness.add_argument('--known-good-rev',
338 metavar="known_good_rev",
338 metavar="known_good_rev",
339 help=("Automatically bisect any failures using this "
339 help=("Automatically bisect any failures using this "
340 "revision as a known-good revision."))
340 "revision as a known-good revision."))
341 harness.add_argument("--list-tests", action="store_true",
341 harness.add_argument("--list-tests", action="store_true",
342 help="list tests instead of running them")
342 help="list tests instead of running them")
343 harness.add_argument("--loop", action="store_true",
343 harness.add_argument("--loop", action="store_true",
344 help="loop tests repeatedly")
344 help="loop tests repeatedly")
345 harness.add_argument('--random', action="store_true",
345 harness.add_argument('--random', action="store_true",
346 help='run tests in random order')
346 help='run tests in random order')
347 harness.add_argument("-p", "--port", type=int,
347 harness.add_argument("-p", "--port", type=int,
348 help="port on which servers should listen"
348 help="port on which servers should listen"
349 " (default: $%s or %d)" % defaults['port'])
349 " (default: $%s or %d)" % defaults['port'])
350 harness.add_argument('--profile-runner', action='store_true',
350 harness.add_argument('--profile-runner', action='store_true',
351 help='run statprof on run-tests')
351 help='run statprof on run-tests')
352 harness.add_argument("-R", "--restart", action="store_true",
352 harness.add_argument("-R", "--restart", action="store_true",
353 help="restart at last error")
353 help="restart at last error")
354 harness.add_argument("--runs-per-test", type=int, dest="runs_per_test",
354 harness.add_argument("--runs-per-test", type=int, dest="runs_per_test",
355 help="run each test N times (default=1)", default=1)
355 help="run each test N times (default=1)", default=1)
356 harness.add_argument("--shell",
356 harness.add_argument("--shell",
357 help="shell to use (default: $%s or %s)" % defaults['shell'])
357 help="shell to use (default: $%s or %s)" % defaults['shell'])
358 harness.add_argument('--showchannels', action='store_true',
358 harness.add_argument('--showchannels', action='store_true',
359 help='show scheduling channels')
359 help='show scheduling channels')
360 harness.add_argument("--slowtimeout", type=int,
360 harness.add_argument("--slowtimeout", type=int,
361 help="kill errant slow tests after SLOWTIMEOUT seconds"
361 help="kill errant slow tests after SLOWTIMEOUT seconds"
362 " (default: $%s or %d)" % defaults['slowtimeout'])
362 " (default: $%s or %d)" % defaults['slowtimeout'])
363 harness.add_argument("-t", "--timeout", type=int,
363 harness.add_argument("-t", "--timeout", type=int,
364 help="kill errant tests after TIMEOUT seconds"
364 help="kill errant tests after TIMEOUT seconds"
365 " (default: $%s or %d)" % defaults['timeout'])
365 " (default: $%s or %d)" % defaults['timeout'])
366 harness.add_argument("--tmpdir",
366 harness.add_argument("--tmpdir",
367 help="run tests in the given temporary directory"
367 help="run tests in the given temporary directory"
368 " (implies --keep-tmpdir)")
368 " (implies --keep-tmpdir)")
369 harness.add_argument("-v", "--verbose", action="store_true",
369 harness.add_argument("-v", "--verbose", action="store_true",
370 help="output verbose messages")
370 help="output verbose messages")
371
371
372 hgconf = parser.add_argument_group('Mercurial Configuration')
372 hgconf = parser.add_argument_group('Mercurial Configuration')
373 hgconf.add_argument("--chg", action="store_true",
373 hgconf.add_argument("--chg", action="store_true",
374 help="install and use chg wrapper in place of hg")
374 help="install and use chg wrapper in place of hg")
375 hgconf.add_argument("--compiler",
375 hgconf.add_argument("--compiler",
376 help="compiler to build with")
376 help="compiler to build with")
377 hgconf.add_argument('--extra-config-opt', action="append", default=[],
377 hgconf.add_argument('--extra-config-opt', action="append", default=[],
378 help='set the given config opt in the test hgrc')
378 help='set the given config opt in the test hgrc')
379 hgconf.add_argument("-l", "--local", action="store_true",
379 hgconf.add_argument("-l", "--local", action="store_true",
380 help="shortcut for --with-hg=<testdir>/../hg, "
380 help="shortcut for --with-hg=<testdir>/../hg, "
381 "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
381 "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
382 hgconf.add_argument("--ipv6", action="store_true",
382 hgconf.add_argument("--ipv6", action="store_true",
383 help="prefer IPv6 to IPv4 for network related tests")
383 help="prefer IPv6 to IPv4 for network related tests")
384 hgconf.add_argument("--pure", action="store_true",
384 hgconf.add_argument("--pure", action="store_true",
385 help="use pure Python code instead of C extensions")
385 help="use pure Python code instead of C extensions")
386 hgconf.add_argument("-3", "--py3k-warnings", action="store_true",
386 hgconf.add_argument("-3", "--py3k-warnings", action="store_true",
387 help="enable Py3k warnings on Python 2.7+")
387 help="enable Py3k warnings on Python 2.7+")
388 hgconf.add_argument("--with-chg", metavar="CHG",
388 hgconf.add_argument("--with-chg", metavar="CHG",
389 help="use specified chg wrapper in place of hg")
389 help="use specified chg wrapper in place of hg")
390 hgconf.add_argument("--with-hg",
390 hgconf.add_argument("--with-hg",
391 metavar="HG",
391 metavar="HG",
392 help="test using specified hg script rather than a "
392 help="test using specified hg script rather than a "
393 "temporary installation")
393 "temporary installation")
394 # This option should be deleted once test-check-py3-compat.t and other
394 # This option should be deleted once test-check-py3-compat.t and other
395 # Python 3 tests run with Python 3.
395 # Python 3 tests run with Python 3.
396 hgconf.add_argument("--with-python3", metavar="PYTHON3",
396 hgconf.add_argument("--with-python3", metavar="PYTHON3",
397 help="Python 3 interpreter (if running under Python 2)"
397 help="Python 3 interpreter (if running under Python 2)"
398 " (TEMPORARY)")
398 " (TEMPORARY)")
399
399
400 reporting = parser.add_argument_group('Results Reporting')
400 reporting = parser.add_argument_group('Results Reporting')
401 reporting.add_argument("-C", "--annotate", action="store_true",
401 reporting.add_argument("-C", "--annotate", action="store_true",
402 help="output files annotated with coverage")
402 help="output files annotated with coverage")
403 reporting.add_argument("--color", choices=["always", "auto", "never"],
403 reporting.add_argument("--color", choices=["always", "auto", "never"],
404 default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
404 default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
405 help="colorisation: always|auto|never (default: auto)")
405 help="colorisation: always|auto|never (default: auto)")
406 reporting.add_argument("-c", "--cover", action="store_true",
406 reporting.add_argument("-c", "--cover", action="store_true",
407 help="print a test coverage report")
407 help="print a test coverage report")
408 reporting.add_argument('--exceptions', action='store_true',
408 reporting.add_argument('--exceptions', action='store_true',
409 help='log all exceptions and generate an exception report')
409 help='log all exceptions and generate an exception report')
410 reporting.add_argument("-H", "--htmlcov", action="store_true",
410 reporting.add_argument("-H", "--htmlcov", action="store_true",
411 help="create an HTML report of the coverage of the files")
411 help="create an HTML report of the coverage of the files")
412 reporting.add_argument("--json", action="store_true",
412 reporting.add_argument("--json", action="store_true",
413 help="store test result data in 'report.json' file")
413 help="store test result data in 'report.json' file")
414 reporting.add_argument("--outputdir",
414 reporting.add_argument("--outputdir",
415 help="directory to write error logs to (default=test directory)")
415 help="directory to write error logs to (default=test directory)")
416 reporting.add_argument("-n", "--nodiff", action="store_true",
416 reporting.add_argument("-n", "--nodiff", action="store_true",
417 help="skip showing test changes")
417 help="skip showing test changes")
418 reporting.add_argument("-S", "--noskips", action="store_true",
418 reporting.add_argument("-S", "--noskips", action="store_true",
419 help="don't report skip tests verbosely")
419 help="don't report skip tests verbosely")
420 reporting.add_argument("--time", action="store_true",
420 reporting.add_argument("--time", action="store_true",
421 help="time how long each test takes")
421 help="time how long each test takes")
422 reporting.add_argument("--view",
422 reporting.add_argument("--view",
423 help="external diff viewer")
423 help="external diff viewer")
424 reporting.add_argument("--xunit",
424 reporting.add_argument("--xunit",
425 help="record xunit results at specified path")
425 help="record xunit results at specified path")
426
426
427 for option, (envvar, default) in defaults.items():
427 for option, (envvar, default) in defaults.items():
428 defaults[option] = type(default)(os.environ.get(envvar, default))
428 defaults[option] = type(default)(os.environ.get(envvar, default))
429 parser.set_defaults(**defaults)
429 parser.set_defaults(**defaults)
430
430
431 return parser
431 return parser
432
432
def parseargs(args, parser):
    """Parse arguments with our OptionParser and validate results.

    Returns the parsed options namespace after normalizing paths,
    cross-checking incompatible option combinations (calling
    parser.error(), which exits, on violations), and updating the
    module-level ``useipv6`` and ``verbose`` globals.
    """
    options = parser.parse_args(args)

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    # --with-hg: point at an existing hg executable instead of installing one.
    if options.with_hg:
        options.with_hg = canonpath(_bytespath(options.with_hg))
        if not (os.path.isfile(options.with_hg) and
                os.access(options.with_hg, os.X_OK)):
            parser.error('--with-hg must specify an executable hg script')
        if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
    # --local: use the hg (and optionally chg) from the enclosing repository.
    if options.local:
        testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            # os.X_OK is not meaningful on Windows, so only check elsewhere.
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error('--local specified, but %r not found or '
                             'not executable' % binpath)
            setattr(options, attr, binpath)

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False # no installation to temporary location
        options.with_chg = canonpath(_bytespath(options.with_chg))
        if not (os.path.isfile(options.with_chg) and
                os.access(options.with_chg, os.X_OK)):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error('--chg does not work when --with-hg is specified '
                     '(use --with-chg instead)')

    if options.color == 'always' and not pygmentspresent:
        sys.stderr.write('warning: --color=always ignored because '
                         'pygments is not installed\n')

    if options.bisect_repo and not options.known_good_rev:
        parser.error("--bisect-repo cannot be used without --known-good-rev")

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = ((not checksocketfamily('AF_INET'))
                   and checksocketfamily('AF_INET6'))

    # Coverage options require the third-party 'coverage' package and do not
    # work with external hg installations (no way to hook their interpreter).
    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage
            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error("sorry, coverage options do not work when --local "
                     "is specified")

    if options.anycoverage and options.with_hg:
        parser.error("sorry, coverage options do not work when --with-hg "
                     "is specified")

    global verbose
    if options.verbose:
        # verbose becomes a (here empty) prefix string printed by log().
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        # --debug streams unfiltered output, so timeouts cannot apply.
        if options.timeout != defaults['timeout']:
            sys.stderr.write(
                'warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n')
        options.timeout = 0
        options.slowtimeout = 0
    if options.py3k_warnings:
        if PYTHON3:
            parser.error(
                '--py3k-warnings can only be used on Python 2.7')
    if options.with_python3:
        if PYTHON3:
            parser.error('--with-python3 cannot be used when executing with '
                         'Python 3')

        options.with_python3 = canonpath(options.with_python3)
        # Verify Python3 executable is acceptable.
        proc = subprocess.Popen([options.with_python3, b'--version'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _err = proc.communicate()
        ret = proc.wait()
        if ret != 0:
            parser.error('could not determine version of python 3')
        if not out.startswith('Python '):
            parser.error('unexpected output from python3 --version: %s' %
                         out)
        vers = version.LooseVersion(out[len('Python '):])
        if vers < version.LooseVersion('3.5.0'):
            parser.error('--with-python3 version must be 3.5.0 or greater; '
                         'got %s' % out)

    # Black/white lists are files naming tests to skip or always report on.
    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        # channel display and diff output would interleave; disable diffs.
        options.nodiff = True

    return options
565
565
def rename(src, dst):
    """Move *src* to *dst*, supporting an existing destination.

    Unlike os.rename(), this works when *dst* already exists (or is held
    open, as on Windows) and across filesystems, at the cost of
    atomicity: the data is copied first, then the source is removed.
    """
    shutil.copy(src, dst)
    os.unlink(src)
572
572
# The test runner compares output as bytes, but difflib.unified_diff only
# accepts str on Python 3; wrap it with difflib.diff_bytes there so callers
# get a single bytes-in/bytes-out diff function on both major versions.
_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools
    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
577
577
def getdiff(expected, output, ref, err):
    """Diff *expected* against *output* (lists of byte lines).

    Returns a (servefail, lines) pair: *lines* is the unified diff with
    normalized header paths, and *servefail* tells whether the output
    reports a server child process that failed to start.
    """
    sawservefail = False
    diff = []
    for rawline in _unified_diff(expected, output, ref, err):
        cooked = rawline
        if cooked.startswith((b'+++', b'---')):
            # Normalize header paths so Windows backslashes diff stably,
            # and drop the trailing space some difflib versions emit.
            cooked = cooked.replace(b'\\', b'/')
            if cooked.endswith(b' \n'):
                cooked = cooked[:-2] + b'\n'
        diff.append(cooked)
        if cooked.startswith(b'+ abort: child process failed to start'):
            sawservefail = True

    return sawservefail, diff
592
592
# Global verbosity state: False means quiet; parseargs() replaces it with a
# (possibly empty) prefix string when --verbose is given.
verbose = False
def vlog(*msg):
    """Forward *msg* to log(), but only when verbose mode is enabled."""
    if verbose is not False:
        return log(*msg)
    return None
600
600
# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceding line output in group 1:
#
#   output..output (feature !)\n
#
# Raw bytes literal: '\(' and '\)' are invalid escape sequences in a plain
# literal (deprecated since Python 3.6, a future SyntaxError).
optline = re.compile(br'(.*) \((.+?) !\)\n$')

def cdatasafe(data):
    """Make a string safe to include in a CDATA block.

    Certain control characters are illegal in a CDATA block, and
    there's no way to include a ]]> in a CDATA either. This function
    replaces illegal bytes with ? and adds a space between the ]] so
    that it won't break the CDATA block.
    """
    return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')
620
620
def log(*msg):
    """Print *msg* to stdout, space separated, while holding iolock.

    When verbose mode carries a prefix string, it is printed first.
    """
    with iolock:
        parts = list(msg)
        if verbose:
            parts.insert(0, verbose)
        for part in parts:
            print(part, end=' ')
        print()
        sys.stdout.flush()
633
633
def highlightdiff(line, color):
    """Return *line* (bytes) colorized as a diff when *color* is true."""
    if color:
        assert pygmentspresent
        text = line.decode('latin1')
        colored = pygments.highlight(text, difflexer, terminal256formatter)
        return colored.encode('latin1')
    return line
640
640
def highlightmsg(msg, color):
    """Return runner message *msg* colorized when *color* is true."""
    if color:
        assert pygmentspresent
        return pygments.highlight(msg, runnerlexer, runnerformatter)
    return msg
646
646
def terminate(proc):
    """Terminate subprocess *proc* (best effort)."""
    vlog('# Terminating process %d' % proc.pid)
    try:
        proc.terminate()
    except OSError:
        # The process may already have exited; nothing more to do.
        pass
654
654
def killdaemons(pidfile):
    """Kill the daemon processes listed in *pidfile* and remove the file."""
    # Imported lazily under an alias so the helper module doesn't shadow
    # this function's own name.
    import killdaemons as killmod
    return killmod.killdaemons(pidfile, tryhard=False, remove=True,
                               logfn=vlog)
659
659
class Test(unittest.TestCase):
    """Encapsulates a single, runnable test.

    While this class conforms to the unittest.TestCase API, it differs in that
    instances need to be instantiated manually. (Typically, unittest.TestCase
    classes are instantiated automatically by scanning modules.)
    """

    # Status code reserved for skipped tests (used by hghave).
    SKIPPED_STATUS = 80
670
670
    def __init__(self, path, outputdir, tmpdir, keeptmpdir=False,
                 debug=False,
                 timeout=None,
                 startport=None, extraconfigopts=None,
                 py3kwarnings=False, shell=None, hgcommand=None,
                 slowtimeout=None, usechg=False,
                 useipv6=False):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        outputdir is the directory where the test's .err output file is
        written.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 3 port numbers for execution. It is the caller's
        responsibility to allocate a non-overlapping port range to Test
        instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        py3kwarnings enables Py3k warnings.

        shell is the shell to execute tests in.

        hgcommand is the name of the hg binary to run (defaults to b'hg').

        usechg runs the test through chg; useipv6 makes test servers use
        IPv6 addresses.
        """
        # Fall back to global defaults for unspecified knobs.
        if timeout is None:
            timeout = defaults['timeout']
        if startport is None:
            startport = defaults['port']
        if slowtimeout is None:
            slowtimeout = defaults['slowtimeout']
        # Identity: path/name of the test file (bytes and str flavors).
        self.path = path
        self.bname = os.path.basename(path)
        self.name = _strpath(self.bname)
        self._testdir = os.path.dirname(path)
        self._outputdir = outputdir
        self._tmpname = os.path.basename(path)
        # Where failing output is saved for inspection/diffing.
        self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)

        # Configuration captured from the constructor arguments.
        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._py3kwarnings = py3kwarnings
        self._shell = _bytespath(shell)
        self._hgcommand = hgcommand or b'hg'
        self._usechg = usechg
        self._useipv6 = useipv6

        # Per-run state; (re)initialized by setUp() before each run.
        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None
        self._chgsockdir = None

        # Reference (expected) output, loaded once at construction.
        self._refout = self.readrefout()
745
745
746 def readrefout(self):
746 def readrefout(self):
747 """read reference output"""
747 """read reference output"""
748 # If we're not in --debug mode and reference output file exists,
748 # If we're not in --debug mode and reference output file exists,
749 # check test output against it.
749 # check test output against it.
750 if self._debug:
750 if self._debug:
751 return None # to match "out is None"
751 return None # to match "out is None"
752 elif os.path.exists(self.refpath):
752 elif os.path.exists(self.refpath):
753 with open(self.refpath, 'rb') as f:
753 with open(self.refpath, 'rb') as f:
754 return f.read().splitlines(True)
754 return f.read().splitlines(True)
755 else:
755 else:
756 return []
756 return []
757
757
    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        # unittest.TestCase looks this attribute up when building its repr;
        # expose the test name without the usual TestCase construction.
        return self.name
762
762
    def __str__(self):
        # A test is identified by its file name everywhere in the runner.
        return self.name
765
765
    def shortDescription(self):
        # Used by unittest result reporting; the name is description enough.
        return self.name
768
768
769 def setUp(self):
769 def setUp(self):
770 """Tasks to perform before run()."""
770 """Tasks to perform before run()."""
771 self._finished = False
771 self._finished = False
772 self._ret = None
772 self._ret = None
773 self._out = None
773 self._out = None
774 self._skipped = None
774 self._skipped = None
775
775
776 try:
776 try:
777 os.mkdir(self._threadtmp)
777 os.mkdir(self._threadtmp)
778 except OSError as e:
778 except OSError as e:
779 if e.errno != errno.EEXIST:
779 if e.errno != errno.EEXIST:
780 raise
780 raise
781
781
782 name = self._tmpname
782 name = self._tmpname
783 self._testtmp = os.path.join(self._threadtmp, name)
783 self._testtmp = os.path.join(self._threadtmp, name)
784 os.mkdir(self._testtmp)
784 os.mkdir(self._testtmp)
785
785
786 # Remove any previous output files.
786 # Remove any previous output files.
787 if os.path.exists(self.errpath):
787 if os.path.exists(self.errpath):
788 try:
788 try:
789 os.remove(self.errpath)
789 os.remove(self.errpath)
790 except OSError as e:
790 except OSError as e:
791 # We might have raced another test to clean up a .err
791 # We might have raced another test to clean up a .err
792 # file, so ignore ENOENT when removing a previous .err
792 # file, so ignore ENOENT when removing a previous .err
793 # file.
793 # file.
794 if e.errno != errno.ENOENT:
794 if e.errno != errno.ENOENT:
795 raise
795 raise
796
796
797 if self._usechg:
797 if self._usechg:
798 self._chgsockdir = os.path.join(self._threadtmp,
798 self._chgsockdir = os.path.join(self._threadtmp,
799 b'%s.chgsock' % name)
799 b'%s.chgsock' % name)
800 os.mkdir(self._chgsockdir)
800 os.mkdir(self._chgsockdir)
801
801
    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            # setUp() failures are errors, not test failures; abort early.
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except unittest.SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                # NOTE(review): addFailure() returning a true value appears
                # to mean the failure was accepted (e.g. interactively) —
                # confirm against the TestResult implementation.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            # A tearDown() failure turns an otherwise-passing test into an
            # error.
            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            # Always balance the startTest() call, noting interruption.
            result.stopTest(self, interrupted=self._aborted)
856
856
    def runTest(self):
        """Run this test instance.

        Executes the test via _run() and classifies the outcome: skip
        (SKIPPED_STATUS from hghave), timeout, missing result code,
        output mismatch, or nonzero exit — failing via self.fail() or
        raising unittest.SkipTest as appropriate.
        """
        env = self._getenv()
        self._genrestoreenv(env)
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            # Negative return codes mean death by signal.
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None: # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise unittest.SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            self.fail('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (ret != 0 or out != self._refout) and not self._skipped \
                and not self._debug:
                f = open(self.errpath, 'wb')
                for line in out:
                    f.write(line)
                f.close()

            # The result object handles diff calculation for us.
            if self._result.addOutputMismatch(self, ret, out, self._refout):
                # change was accepted, skip failing
                return

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))
922
922
    def tearDown(self):
        """Tasks to perform after run()."""
        # Kill any daemon processes (e.g. servers) the test left running.
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
                (self._testtmp.decode('utf-8'),
                 self._threadtmp.decode('utf-8')))
        else:
            shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        # Persist the output of a failed (non-skipped, non-debug) run to the
        # .err file for later inspection.
        if (self._ret != 0 or self._out != self._refout) and not self._skipped \
            and not self._debug and self._out:
            f = open(self.errpath, 'wb')
            for line in self._out:
                f.write(line)
            f.close()

        vlog("# Ret was:", self._ret, '(%s)' % self.name)
950
950
    def _run(self, env):
        """Execute the test with environment *env*; subclass hook.

        Implementations return a (returncode, output-lines) pair.
        """
        # This should be implemented in child classes to run tests.
        raise unittest.SkipTest('unknown test type')
954
954
    def abort(self):
        """Terminate execution of this test."""
        # run() consults this flag when reporting via stopTest().
        self._aborted = True
958
958
    def _portmap(self, i):
        """Return a (regex, replacement) pair mapping port startport+i to
        the stable b':$HGPORT' / b':$HGPORT<i>' token used in test output."""
        # Port 0 maps to plain $HGPORT (no numeric suffix).
        offset = b'' if i == 0 else b'%d' % i
        return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
962
962
    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur. Returns a list of (regex, replacement) byte pairs.
        """
        r = [
            # This list should be parallel to defineport in _getenv
            self._portmap(0),
            self._portmap(1),
            self._portmap(2),
            # Backup bundle paths vary per run; mark them as globs.
            (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
             br'\1 (glob)'),
            # The local IP shows up in server output; stabilize it.
            (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
            # Transaction IDs are random 40-hex-digit strings.
            (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
        ]
        r.append((self._escapepath(self._testtmp), b'$TESTTMP'))

        # Tests may provide extra repo-wide substitutions via a
        # common-pattern.py file next to the tests.
        replacementfile = os.path.join(self._testdir, b'common-pattern.py')

        if os.path.exists(replacementfile):
            data = {}
            with open(replacementfile, mode='rb') as source:
                # the intermediate 'compile' step help with debugging
                code = compile(source.read(), replacementfile, 'exec')
                exec(code, data)
            r.extend(data.get('substitutions', ()))
        return r
992
992
993 def _escapepath(self, p):
993 def _escapepath(self, p):
994 if os.name == 'nt':
994 if os.name == 'nt':
995 return (
995 return (
996 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
996 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
997 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
997 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
998 for c in p))
998 for c in p))
999 )
999 )
1000 else:
1000 else:
1001 return re.escape(p)
1001 return re.escape(p)
1002
1002
1003 def _localip(self):
1003 def _localip(self):
1004 if self._useipv6:
1004 if self._useipv6:
1005 return b'::1'
1005 return b'::1'
1006 else:
1006 else:
1007 return b'127.0.0.1'
1007 return b'127.0.0.1'
1008
1008
    def _genrestoreenv(self, testenv):
        """Generate a script that can be used by tests to restore the original
        environment."""
        # Put the restoreenv script inside self._threadtmp
        scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
        testenv['HGTEST_RESTOREENV'] = scriptpath

        # Only restore environment variable names that the shell allows
        # us to export.
        name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')

        # Do not restore these variables; otherwise tests would fail.
        reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}

        with open(scriptpath, 'w') as envf:
            for name, value in origenviron.items():
                if not name_regex.match(name):
                    # Skip environment variables with unusual names not
                    # allowed by most shells.
                    continue
                if name in reqnames:
                    continue
                # Values are shell-quoted since the script is sourced by sh.
                envf.write('%s=%s\n' % (name, shellquote(value)))

            # Variables the harness added (and that did not exist before)
            # must be removed, not restored.
            for name in testenv:
                if name in origenviron or name in reqnames:
                    continue
                envf.write('unset %s\n' % (name,))
1037
1037
1038 def _getenv(self):
1038 def _getenv(self):
1039 """Obtain environment variables to use during test execution."""
1039 """Obtain environment variables to use during test execution."""
1040 def defineport(i):
1040 def defineport(i):
1041 offset = '' if i == 0 else '%s' % i
1041 offset = '' if i == 0 else '%s' % i
1042 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1042 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1043 env = os.environ.copy()
1043 env = os.environ.copy()
1044 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
1044 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
1045 env['HGEMITWARNINGS'] = '1'
1045 env['HGEMITWARNINGS'] = '1'
1046 env['TESTTMP'] = self._testtmp
1046 env['TESTTMP'] = self._testtmp
1047 env['HOME'] = self._testtmp
1047 env['HOME'] = self._testtmp
1048 # This number should match portneeded in _getport
1048 # This number should match portneeded in _getport
1049 for port in xrange(3):
1049 for port in xrange(3):
1050 # This list should be parallel to _portmap in _getreplacements
1050 # This list should be parallel to _portmap in _getreplacements
1051 defineport(port)
1051 defineport(port)
1052 env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc')
1052 env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc')
1053 env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids')
1053 env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids')
1054 env["HGEDITOR"] = ('"' + sys.executable + '"'
1054 env["HGEDITOR"] = ('"' + sys.executable + '"'
1055 + ' -c "import sys; sys.exit(0)"')
1055 + ' -c "import sys; sys.exit(0)"')
1056 env["HGMERGE"] = "internal:merge"
1056 env["HGMERGE"] = "internal:merge"
1057 env["HGUSER"] = "test"
1057 env["HGUSER"] = "test"
1058 env["HGENCODING"] = "ascii"
1058 env["HGENCODING"] = "ascii"
1059 env["HGENCODINGMODE"] = "strict"
1059 env["HGENCODINGMODE"] = "strict"
1060 env['HGIPV6'] = str(int(self._useipv6))
1060 env['HGIPV6'] = str(int(self._useipv6))
1061
1061
1062 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1062 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1063 # IP addresses.
1063 # IP addresses.
1064 env['LOCALIP'] = self._localip()
1064 env['LOCALIP'] = self._localip()
1065
1065
1066 # Reset some environment variables to well-known values so that
1066 # Reset some environment variables to well-known values so that
1067 # the tests produce repeatable output.
1067 # the tests produce repeatable output.
1068 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1068 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1069 env['TZ'] = 'GMT'
1069 env['TZ'] = 'GMT'
1070 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1070 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1071 env['COLUMNS'] = '80'
1071 env['COLUMNS'] = '80'
1072 env['TERM'] = 'xterm'
1072 env['TERM'] = 'xterm'
1073
1073
1074 for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
1074 for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
1075 'HGPLAIN HGPLAINEXCEPT EDITOR VISUAL PAGER ' +
1075 'HGPLAIN HGPLAINEXCEPT EDITOR VISUAL PAGER ' +
1076 'NO_PROXY CHGDEBUG').split():
1076 'NO_PROXY CHGDEBUG').split():
1077 if k in env:
1077 if k in env:
1078 del env[k]
1078 del env[k]
1079
1079
1080 # unset env related to hooks
1080 # unset env related to hooks
1081 for k in env.keys():
1081 for k in env.keys():
1082 if k.startswith('HG_'):
1082 if k.startswith('HG_'):
1083 del env[k]
1083 del env[k]
1084
1084
1085 if self._usechg:
1085 if self._usechg:
1086 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1086 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1087
1087
1088 return env
1088 return env
1089
1089
1090 def _createhgrc(self, path):
1090 def _createhgrc(self, path):
1091 """Create an hgrc file for this test."""
1091 """Create an hgrc file for this test."""
1092 hgrc = open(path, 'wb')
1092 hgrc = open(path, 'wb')
1093 hgrc.write(b'[ui]\n')
1093 hgrc.write(b'[ui]\n')
1094 hgrc.write(b'slash = True\n')
1094 hgrc.write(b'slash = True\n')
1095 hgrc.write(b'interactive = False\n')
1095 hgrc.write(b'interactive = False\n')
1096 hgrc.write(b'mergemarkers = detailed\n')
1096 hgrc.write(b'mergemarkers = detailed\n')
1097 hgrc.write(b'promptecho = True\n')
1097 hgrc.write(b'promptecho = True\n')
1098 hgrc.write(b'[defaults]\n')
1098 hgrc.write(b'[defaults]\n')
1099 hgrc.write(b'[devel]\n')
1099 hgrc.write(b'[devel]\n')
1100 hgrc.write(b'all-warnings = true\n')
1100 hgrc.write(b'all-warnings = true\n')
1101 hgrc.write(b'default-date = 0 0\n')
1101 hgrc.write(b'default-date = 0 0\n')
1102 hgrc.write(b'[largefiles]\n')
1102 hgrc.write(b'[largefiles]\n')
1103 hgrc.write(b'usercache = %s\n' %
1103 hgrc.write(b'usercache = %s\n' %
1104 (os.path.join(self._testtmp, b'.cache/largefiles')))
1104 (os.path.join(self._testtmp, b'.cache/largefiles')))
1105 hgrc.write(b'[lfs]\n')
1106 hgrc.write(b'usercache = %s\n' %
1107 (os.path.join(self._testtmp, b'.cache/lfs')))
1105 hgrc.write(b'[web]\n')
1108 hgrc.write(b'[web]\n')
1106 hgrc.write(b'address = localhost\n')
1109 hgrc.write(b'address = localhost\n')
1107 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
1110 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
1108
1111
1109 for opt in self._extraconfigopts:
1112 for opt in self._extraconfigopts:
1110 section, key = opt.encode('utf-8').split(b'.', 1)
1113 section, key = opt.encode('utf-8').split(b'.', 1)
1111 assert b'=' in key, ('extra config opt %s must '
1114 assert b'=' in key, ('extra config opt %s must '
1112 'have an = for assignment' % opt)
1115 'have an = for assignment' % opt)
1113 hgrc.write(b'[%s]\n%s\n' % (section, key))
1116 hgrc.write(b'[%s]\n%s\n' % (section, key))
1114 hgrc.close()
1117 hgrc.close()
1115
1118
1116 def fail(self, msg):
1119 def fail(self, msg):
1117 # unittest differentiates between errored and failed.
1120 # unittest differentiates between errored and failed.
1118 # Failed is denoted by AssertionError (by default at least).
1121 # Failed is denoted by AssertionError (by default at least).
1119 raise AssertionError(msg)
1122 raise AssertionError(msg)
1120
1123
    def _runcommand(self, cmd, env, normalizenewlines=False):
        """Run command in a sub-process, capturing the output (stdout and
        stderr).

        Return a tuple (exitcode, output). output is None in debug mode.
        """
        if self._debug:
            # Debug mode: inherit stdio so the user sees output live;
            # nothing is captured.
            proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp,
                                    env=env)
            ret = proc.wait()
            return (ret, None)

        proc = Popen4(cmd, self._testtmp, self._timeout, env)
        def cleanup():
            # Kill the child and any daemons it spawned; if the child had
            # already exited cleanly, synthesize a SIGTERM exit status.
            terminate(proc)
            ret = proc.wait()
            if ret == 0:
                ret = signal.SIGTERM << 8
            killdaemons(env['DAEMON_PIDS'])
            return ret

        output = ''
        proc.tochild.close()

        try:
            output = proc.fromchild.read()
        except KeyboardInterrupt:
            vlog('# Handling keyboard interrupt')
            cleanup()
            raise

        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)

        if proc.timeout:
            # NOTE: a string 'timeout' return, not an int, flags timeouts
            # to the caller.
            ret = 'timeout'

        if ret:
            killdaemons(env['DAEMON_PIDS'])

        # Normalize output (ports, $TESTTMP, etc.) before comparison.
        for s, r in self._getreplacements():
            output = re.sub(s, r, output)

        if normalizenewlines:
            output = output.replace('\r\n', '\n')

        return ret, output.splitlines(True)
1169
1172
class PythonTest(Test):
    """A test implemented as a standalone Python script."""

    @property
    def refpath(self):
        # Expected output lives next to the test as '<name>.out'.
        return os.path.join(self._testdir, b'%s.out' % self.bname)

    def _run(self, env):
        switch = b' -3' if self._py3kwarnings else b''
        command = b'%s%s "%s"' % (PYTHON, switch, self.path)
        vlog("# Running", command)
        # On Windows, collapse \r\n so output compares equal to the .out file.
        crlffix = os.name == 'nt'
        result = self._runcommand(command, env, normalizenewlines=crlffix)
        if self._aborted:
            raise KeyboardInterrupt()

        return result
1191
# Some glob patterns apply only in some circumstances, so the script
# might want to remove (glob) annotations that otherwise should be
# retained.
checkcodeglobpats = [
    # On Windows it looks like \ doesn't require a (glob), but we know
    # better.
    re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
    re.compile(br'^moving \S+/.*[^)]$'),
    re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
    # Not all platforms have 127.0.0.1 as loopback (though most do),
    # so we always glob that too.
    re.compile(br'.*\$LOCALIP.*$'),
]
1202
1205
# bchr(i): build a one-byte value from an integer ordinal. Python 2's chr
# already returns a one-byte str; Python 3 needs bytes([i]).
bchr = chr
if PYTHON3:
    def bchr(x):
        return bytes([x])
1206
1209
class TTest(Test):
    """A "t test" is a test backed by a .t file."""

    # Output-line prefixes recognized by the harness (matched elsewhere in
    # this file): a test that skips itself, and a failed hghave probe.
    SKIPPED_PREFIX = b'skipped: '
    FAILED_PREFIX = b'hghave check failed: '
    # Detects bytes that cannot appear literally in expected output.
    NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search

    # Substitution helper plus byte -> escape-sequence table, presumably
    # used to rewrite unprintable output as '(esc)' lines.
    ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
    ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
    ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1220
    def __init__(self, path, *args, **kwds):
        """Initialize a TTest, honoring an optional ``case`` keyword.

        When ``case`` is given (one of the case names parsettestcases
        extracts from the file), the test name, .err path and temp dir name
        are suffixed so each case runs in isolation.
        """
        # accept an extra "case" parameter
        case = kwds.pop('case', None)
        self._case = case
        self._allcases = parsettestcases(path)
        super(TTest, self).__init__(path, *args, **kwds)
        if case:
            self.name = '%s (case %s)' % (self.name, _strpath(case))
            # errpath[:-4] strips the '.err' suffix before re-appending it.
            self.errpath = b'%s.%s.err' % (self.errpath[:-4], case)
            self._tmpname += b'-%s' % case
1228
1231
1229 @property
1232 @property
1230 def refpath(self):
1233 def refpath(self):
1231 return os.path.join(self._testdir, self.bname)
1234 return os.path.join(self._testdir, self.bname)
1232
1235
    def _run(self, env):
        """Translate the .t file into a shell script, run it, and fold the
        output back into the expected-output structure."""
        f = open(self.path, 'rb')
        lines = f.readlines()
        f.close()

        # .t file is both reference output and the test input, keep reference
        # output updated with the the test input. This avoids some race
        # conditions where the reference output does not match the actual test.
        if self._refout is not None:
            self._refout = lines

        salt, script, after, expected = self._parsetest(lines)

        # Write out the generated script.
        fname = b'%s.sh' % self._testtmp
        f = open(fname, 'wb')
        for l in script:
            f.write(l)
        f.close()

        cmd = b'%s "%s"' % (self._shell, fname)
        vlog("# Running", cmd)

        exitcode, output = self._runcommand(cmd, env)

        if self._aborted:
            raise KeyboardInterrupt()

        # Do not merge output if skipped. Return hghave message instead.
        # Similarly, with --debug, output is None.
        if exitcode == self.SKIPPED_STATUS or output is None:
            return exitcode, output

        return self._processoutput(exitcode, output, salt, after, expected)
1267
1270
    def _hghave(self, reqs):
        """Probe the requirements in ``reqs`` via the hghave script.

        Returns (True, None) when all requirements are met, otherwise
        (False, hghave's stdout). Exits the whole runner if hghave itself
        reports a usage error (exit code 2).
        """
        # TODO do something smarter when all other uses of hghave are gone.
        runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
        # Normalize to forward slashes so the shell command works on Windows.
        tdir = runtestdir.replace(b'\\', b'/')
        proc = Popen4(b'%s -c "%s/hghave %s"' %
                      (self._shell, tdir, b' '.join(reqs)),
                      self._testtmp, 0, self._getenv())
        stdout, stderr = proc.communicate()
        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)
        if ret == 2:
            print(stdout.decode('utf-8'))
            sys.exit(1)

        if ret != 0:
            return False, stdout

        # A 'slow' requirement relaxes this test's timeout.
        if b'slow' in reqs:
            self._timeout = self._slowtimeout
        return True, None
1291 return True, None
1289
1292
1290 def _iftest(self, args):
1293 def _iftest(self, args):
1291 # implements "#if"
1294 # implements "#if"
1292 reqs = []
1295 reqs = []
1293 for arg in args:
1296 for arg in args:
1294 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1297 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1295 if arg[3:] == self._case:
1298 if arg[3:] == self._case:
1296 return False
1299 return False
1297 elif arg in self._allcases:
1300 elif arg in self._allcases:
1298 if arg != self._case:
1301 if arg != self._case:
1299 return False
1302 return False
1300 else:
1303 else:
1301 reqs.append(arg)
1304 reqs.append(arg)
1302 return self._hghave(reqs)[0]
1305 return self._hghave(reqs)[0]
1303
1306
    def _parsetest(self, lines):
        """Compile the .t file into a runnable shell script.

        Returns (salt, script, after, expected): the unique marker string,
        the script lines, the non-executed source lines keyed by position,
        and the expected output lines keyed by position.
        """
        # We generate a shell script which outputs unique markers to line
        # up script results with our source. These markers include input
        # line number and the last return code.
        salt = b"SALT%d" % time.time()
        def addsalt(line, inpython):
            if inpython:
                script.append(b'%s %d 0\n' % (salt, line))
            else:
                script.append(b'echo %s %d $?\n' % (salt, line))

        script = []

        # After we run the shell script, we re-unify the script output
        # with non-active parts of the source, with synchronization by our
        # SALT line number markers. The after table contains the non-active
        # components, ordered by line number.
        after = {}

        # Expected shell script output.
        expected = {}

        pos = prepos = -1

        # True or False when in a true or false conditional section
        skipping = None

        # We keep track of whether or not we're in a Python block so we
        # can generate the surrounding doctest magic.
        inpython = False

        if self._debug:
            script.append(b'set -x\n')
        if self._hgcommand != b'hg':
            script.append(b'alias hg="%s"\n' % self._hgcommand)
        if os.getenv('MSYSTEM'):
            # MSYS: make pwd print Windows-style paths.
            script.append(b'alias pwd="pwd -W"\n')

        n = 0
        for n, l in enumerate(lines):
            if not l.endswith(b'\n'):
                l += b'\n'
            if l.startswith(b'#require'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#require':
                    after.setdefault(pos, []).append(' !!! invalid #require\n')
                haveresult, message = self._hghave(lsplit[1:])
                if not haveresult:
                    # Unmet requirement: replace the whole script with a
                    # skip (exit 80 is the runner's skip status).
                    script = [b'echo "%s"\nexit 80\n' % message]
                    break
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#if'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#if':
                    after.setdefault(pos, []).append(' !!! invalid #if\n')
                if skipping is not None:
                    after.setdefault(pos, []).append(' !!! nested #if\n')
                skipping = not self._iftest(lsplit[1:])
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#else'):
                if skipping is None:
                    after.setdefault(pos, []).append(' !!! missing #if\n')
                skipping = not skipping
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#endif'):
                if skipping is None:
                    after.setdefault(pos, []).append(' !!! missing #if\n')
                skipping = None
                after.setdefault(pos, []).append(l)
            elif skipping:
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'  >>> '): # python inlines
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                if not inpython:
                    # We've just entered a Python block. Add the header.
                    inpython = True
                    addsalt(prepos, False) # Make sure we report the exit code.
                    script.append(b'%s -m heredoctest <<EOF\n' % PYTHON)
                addsalt(n, True)
                script.append(l[2:])
            elif l.startswith(b'  ... '): # python inlines
                after.setdefault(prepos, []).append(l)
                script.append(l[2:])
            elif l.startswith(b'  $ '): # commands
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                addsalt(n, False)
                cmd = l[4:].split()
                if len(cmd) == 2 and cmd[0] == b'cd':
                    # Abort the script if a cd fails, rather than running
                    # the remaining commands in the wrong directory.
                    l = b'  $ cd %s || exit 1\n' % cmd[1]
                script.append(l[4:])
            elif l.startswith(b'  > '): # continuations
                after.setdefault(prepos, []).append(l)
                script.append(l[4:])
            elif l.startswith(b'  '): # results
                # Queue up a list of expected results.
                expected.setdefault(pos, []).append(l[2:])
            else:
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                # Non-command/result. Queue up for merged output.
                after.setdefault(pos, []).append(l)

        if inpython:
            script.append(b'EOF\n')
        if skipping is not None:
            after.setdefault(pos, []).append(' !!! missing #endif\n')
        addsalt(n + 1, False)

        return salt, script, after, expected
1421
1424
1422 def _processoutput(self, exitcode, output, salt, after, expected):
1425 def _processoutput(self, exitcode, output, salt, after, expected):
1423 # Merge the script output back into a unified test.
1426 # Merge the script output back into a unified test.
1424 warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
1427 warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
1425 if exitcode != 0:
1428 if exitcode != 0:
1426 warnonly = 3
1429 warnonly = 3
1427
1430
1428 pos = -1
1431 pos = -1
1429 postout = []
1432 postout = []
1430 for l in output:
1433 for l in output:
1431 lout, lcmd = l, None
1434 lout, lcmd = l, None
1432 if salt in l:
1435 if salt in l:
1433 lout, lcmd = l.split(salt, 1)
1436 lout, lcmd = l.split(salt, 1)
1434
1437
1435 while lout:
1438 while lout:
1436 if not lout.endswith(b'\n'):
1439 if not lout.endswith(b'\n'):
1437 lout += b' (no-eol)\n'
1440 lout += b' (no-eol)\n'
1438
1441
1439 # Find the expected output at the current position.
1442 # Find the expected output at the current position.
1440 els = [None]
1443 els = [None]
1441 if expected.get(pos, None):
1444 if expected.get(pos, None):
1442 els = expected[pos]
1445 els = expected[pos]
1443
1446
1444 i = 0
1447 i = 0
1445 optional = []
1448 optional = []
1446 while i < len(els):
1449 while i < len(els):
1447 el = els[i]
1450 el = els[i]
1448
1451
1449 r = self.linematch(el, lout)
1452 r = self.linematch(el, lout)
1450 if isinstance(r, str):
1453 if isinstance(r, str):
1451 if r == '+glob':
1454 if r == '+glob':
1452 lout = el[:-1] + ' (glob)\n'
1455 lout = el[:-1] + ' (glob)\n'
1453 r = '' # Warn only this line.
1456 r = '' # Warn only this line.
1454 elif r == '-glob':
1457 elif r == '-glob':
1455 lout = ''.join(el.rsplit(' (glob)', 1))
1458 lout = ''.join(el.rsplit(' (glob)', 1))
1456 r = '' # Warn only this line.
1459 r = '' # Warn only this line.
1457 elif r == "retry":
1460 elif r == "retry":
1458 postout.append(b' ' + el)
1461 postout.append(b' ' + el)
1459 els.pop(i)
1462 els.pop(i)
1460 break
1463 break
1461 else:
1464 else:
1462 log('\ninfo, unknown linematch result: %r\n' % r)
1465 log('\ninfo, unknown linematch result: %r\n' % r)
1463 r = False
1466 r = False
1464 if r:
1467 if r:
1465 els.pop(i)
1468 els.pop(i)
1466 break
1469 break
1467 if el:
1470 if el:
1468 if el.endswith(b" (?)\n"):
1471 if el.endswith(b" (?)\n"):
1469 optional.append(i)
1472 optional.append(i)
1470 else:
1473 else:
1471 m = optline.match(el)
1474 m = optline.match(el)
1472 if m:
1475 if m:
1473 conditions = [
1476 conditions = [
1474 c for c in m.group(2).split(b' ')]
1477 c for c in m.group(2).split(b' ')]
1475
1478
1476 if not self._iftest(conditions):
1479 if not self._iftest(conditions):
1477 optional.append(i)
1480 optional.append(i)
1478
1481
1479 i += 1
1482 i += 1
1480
1483
1481 if r:
1484 if r:
1482 if r == "retry":
1485 if r == "retry":
1483 continue
1486 continue
1484 # clean up any optional leftovers
1487 # clean up any optional leftovers
1485 for i in optional:
1488 for i in optional:
1486 postout.append(b' ' + els[i])
1489 postout.append(b' ' + els[i])
1487 for i in reversed(optional):
1490 for i in reversed(optional):
1488 del els[i]
1491 del els[i]
1489 postout.append(b' ' + el)
1492 postout.append(b' ' + el)
1490 else:
1493 else:
1491 if self.NEEDESCAPE(lout):
1494 if self.NEEDESCAPE(lout):
1492 lout = TTest._stringescape(b'%s (esc)\n' %
1495 lout = TTest._stringescape(b'%s (esc)\n' %
1493 lout.rstrip(b'\n'))
1496 lout.rstrip(b'\n'))
1494 postout.append(b' ' + lout) # Let diff deal with it.
1497 postout.append(b' ' + lout) # Let diff deal with it.
1495 if r != '': # If line failed.
1498 if r != '': # If line failed.
1496 warnonly = 3 # for sure not
1499 warnonly = 3 # for sure not
1497 elif warnonly == 1: # Is "not yet" and line is warn only.
1500 elif warnonly == 1: # Is "not yet" and line is warn only.
1498 warnonly = 2 # Yes do warn.
1501 warnonly = 2 # Yes do warn.
1499 break
1502 break
1500 else:
1503 else:
1501 # clean up any optional leftovers
1504 # clean up any optional leftovers
1502 while expected.get(pos, None):
1505 while expected.get(pos, None):
1503 el = expected[pos].pop(0)
1506 el = expected[pos].pop(0)
1504 if el:
1507 if el:
1505 if not el.endswith(b" (?)\n"):
1508 if not el.endswith(b" (?)\n"):
1506 m = optline.match(el)
1509 m = optline.match(el)
1507 if m:
1510 if m:
1508 conditions = [c for c in m.group(2).split(b' ')]
1511 conditions = [c for c in m.group(2).split(b' ')]
1509
1512
1510 if self._iftest(conditions):
1513 if self._iftest(conditions):
1511 # Don't append as optional line
1514 # Don't append as optional line
1512 continue
1515 continue
1513 else:
1516 else:
1514 continue
1517 continue
1515 postout.append(b' ' + el)
1518 postout.append(b' ' + el)
1516
1519
1517 if lcmd:
1520 if lcmd:
1518 # Add on last return code.
1521 # Add on last return code.
1519 ret = int(lcmd.split()[1])
1522 ret = int(lcmd.split()[1])
1520 if ret != 0:
1523 if ret != 0:
1521 postout.append(b' [%d]\n' % ret)
1524 postout.append(b' [%d]\n' % ret)
1522 if pos in after:
1525 if pos in after:
1523 # Merge in non-active test bits.
1526 # Merge in non-active test bits.
1524 postout += after.pop(pos)
1527 postout += after.pop(pos)
1525 pos = int(lcmd.split()[0])
1528 pos = int(lcmd.split()[0])
1526
1529
1527 if pos in after:
1530 if pos in after:
1528 postout += after.pop(pos)
1531 postout += after.pop(pos)
1529
1532
1530 if warnonly == 2:
1533 if warnonly == 2:
1531 exitcode = False # Set exitcode to warned.
1534 exitcode = False # Set exitcode to warned.
1532
1535
1533 return exitcode, postout
1536 return exitcode, postout
1534
1537
1535 @staticmethod
1538 @staticmethod
1536 def rematch(el, l):
1539 def rematch(el, l):
1537 try:
1540 try:
1538 el = b'(?:' + el + b')'
1541 el = b'(?:' + el + b')'
1539 # use \Z to ensure that the regex matches to the end of the string
1542 # use \Z to ensure that the regex matches to the end of the string
1540 if os.name == 'nt':
1543 if os.name == 'nt':
1541 return re.match(el + br'\r?\n\Z', l)
1544 return re.match(el + br'\r?\n\Z', l)
1542 return re.match(el + br'\n\Z', l)
1545 return re.match(el + br'\n\Z', l)
1543 except re.error:
1546 except re.error:
1544 # el is an invalid regex
1547 # el is an invalid regex
1545 return False
1548 return False
1546
1549
1547 @staticmethod
1550 @staticmethod
1548 def globmatch(el, l):
1551 def globmatch(el, l):
1549 # The only supported special characters are * and ? plus / which also
1552 # The only supported special characters are * and ? plus / which also
1550 # matches \ on windows. Escaping of these characters is supported.
1553 # matches \ on windows. Escaping of these characters is supported.
1551 if el + b'\n' == l:
1554 if el + b'\n' == l:
1552 if os.altsep:
1555 if os.altsep:
1553 # matching on "/" is not needed for this line
1556 # matching on "/" is not needed for this line
1554 for pat in checkcodeglobpats:
1557 for pat in checkcodeglobpats:
1555 if pat.match(el):
1558 if pat.match(el):
1556 return True
1559 return True
1557 return b'-glob'
1560 return b'-glob'
1558 return True
1561 return True
1559 el = el.replace(b'$LOCALIP', b'*')
1562 el = el.replace(b'$LOCALIP', b'*')
1560 i, n = 0, len(el)
1563 i, n = 0, len(el)
1561 res = b''
1564 res = b''
1562 while i < n:
1565 while i < n:
1563 c = el[i:i + 1]
1566 c = el[i:i + 1]
1564 i += 1
1567 i += 1
1565 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1568 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1566 res += el[i - 1:i + 1]
1569 res += el[i - 1:i + 1]
1567 i += 1
1570 i += 1
1568 elif c == b'*':
1571 elif c == b'*':
1569 res += b'.*'
1572 res += b'.*'
1570 elif c == b'?':
1573 elif c == b'?':
1571 res += b'.'
1574 res += b'.'
1572 elif c == b'/' and os.altsep:
1575 elif c == b'/' and os.altsep:
1573 res += b'[/\\\\]'
1576 res += b'[/\\\\]'
1574 else:
1577 else:
1575 res += re.escape(c)
1578 res += re.escape(c)
1576 return TTest.rematch(res, l)
1579 return TTest.rematch(res, l)
1577
1580
1578 def linematch(self, el, l):
1581 def linematch(self, el, l):
1579 retry = False
1582 retry = False
1580 if el == l: # perfect match (fast)
1583 if el == l: # perfect match (fast)
1581 return True
1584 return True
1582 if el:
1585 if el:
1583 if el.endswith(b" (?)\n"):
1586 if el.endswith(b" (?)\n"):
1584 retry = "retry"
1587 retry = "retry"
1585 el = el[:-5] + b"\n"
1588 el = el[:-5] + b"\n"
1586 else:
1589 else:
1587 m = optline.match(el)
1590 m = optline.match(el)
1588 if m:
1591 if m:
1589 conditions = [c for c in m.group(2).split(b' ')]
1592 conditions = [c for c in m.group(2).split(b' ')]
1590
1593
1591 el = m.group(1) + b"\n"
1594 el = m.group(1) + b"\n"
1592 if not self._iftest(conditions):
1595 if not self._iftest(conditions):
1593 retry = "retry" # Not required by listed features
1596 retry = "retry" # Not required by listed features
1594
1597
1595 if el.endswith(b" (esc)\n"):
1598 if el.endswith(b" (esc)\n"):
1596 if PYTHON3:
1599 if PYTHON3:
1597 el = el[:-7].decode('unicode_escape') + '\n'
1600 el = el[:-7].decode('unicode_escape') + '\n'
1598 el = el.encode('utf-8')
1601 el = el.encode('utf-8')
1599 else:
1602 else:
1600 el = el[:-7].decode('string-escape') + '\n'
1603 el = el[:-7].decode('string-escape') + '\n'
1601 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
1604 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
1602 return True
1605 return True
1603 if el.endswith(b" (re)\n"):
1606 if el.endswith(b" (re)\n"):
1604 return TTest.rematch(el[:-6], l) or retry
1607 return TTest.rematch(el[:-6], l) or retry
1605 if el.endswith(b" (glob)\n"):
1608 if el.endswith(b" (glob)\n"):
1606 # ignore '(glob)' added to l by 'replacements'
1609 # ignore '(glob)' added to l by 'replacements'
1607 if l.endswith(b" (glob)\n"):
1610 if l.endswith(b" (glob)\n"):
1608 l = l[:-8] + b"\n"
1611 l = l[:-8] + b"\n"
1609 return TTest.globmatch(el[:-8], l) or retry
1612 return TTest.globmatch(el[:-8], l) or retry
1610 if os.altsep and l.replace(b'\\', b'/') == el:
1613 if os.altsep and l.replace(b'\\', b'/') == el:
1611 return b'+glob'
1614 return b'+glob'
1612 return retry
1615 return retry
1613
1616
1614 @staticmethod
1617 @staticmethod
1615 def parsehghaveoutput(lines):
1618 def parsehghaveoutput(lines):
1616 '''Parse hghave log lines.
1619 '''Parse hghave log lines.
1617
1620
1618 Return tuple of lists (missing, failed):
1621 Return tuple of lists (missing, failed):
1619 * the missing/unknown features
1622 * the missing/unknown features
1620 * the features for which existence check failed'''
1623 * the features for which existence check failed'''
1621 missing = []
1624 missing = []
1622 failed = []
1625 failed = []
1623 for line in lines:
1626 for line in lines:
1624 if line.startswith(TTest.SKIPPED_PREFIX):
1627 if line.startswith(TTest.SKIPPED_PREFIX):
1625 line = line.splitlines()[0]
1628 line = line.splitlines()[0]
1626 missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
1629 missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
1627 elif line.startswith(TTest.FAILED_PREFIX):
1630 elif line.startswith(TTest.FAILED_PREFIX):
1628 line = line.splitlines()[0]
1631 line = line.splitlines()[0]
1629 failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
1632 failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
1630
1633
1631 return missing, failed
1634 return missing, failed
1632
1635
1633 @staticmethod
1636 @staticmethod
1634 def _escapef(m):
1637 def _escapef(m):
1635 return TTest.ESCAPEMAP[m.group(0)]
1638 return TTest.ESCAPEMAP[m.group(0)]
1636
1639
1637 @staticmethod
1640 @staticmethod
1638 def _stringescape(s):
1641 def _stringescape(s):
1639 return TTest.ESCAPESUB(TTest._escapef, s)
1642 return TTest.ESCAPESUB(TTest._escapef, s)
1640
1643
# Serializes console output across worker threads; reentrant so that code
# already holding the lock may call helpers that acquire it again.
iolock = threading.RLock()
1642
1645
class TestResult(unittest.TextTestResult):
    """Holds results when executing via unittest.

    Adds Mercurial-specific bookkeeping on top of the standard text test
    result: a ``skipped`` polyfill, a custom "ignored" outcome, per-test
    timing data, xunit report data, and interactive acceptance of output
    diffs.
    """
    # Subclass the public TextTestResult rather than the Python 2-only
    # private alias unittest._TextTestResult: the alias does not exist on
    # Python 3, while TextTestResult is the same class on Python 2.7.
    def __init__(self, options, *args, **kwargs):
        """``options`` is the parsed command line options object; the
        remaining positional arguments (stream, descriptions, verbosity)
        are forwarded to TextTestResult."""
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        # (name, user CPU, sys CPU, real, start, end) tuples; see stopTest().
        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

        if options.color == 'auto':
            # colorize only when pygments is importable and stdout is a tty
            self.color = pygmentspresent and self.stream.isatty()
        elif options.color == 'never':
            self.color = False
        else: # 'always', for testing purposes
            self.color = pygmentspresent

    def addFailure(self, test, reason):
        """Record a failure; print progress unless --first stops the run."""
        self.failures.append((test, reason))

        if self._options.first:
            self.stop()
        else:
            with iolock:
                if reason == "timed out":
                    self.stream.write('t')
                else:
                    if not self._options.nodiff:
                        self.stream.write('\n')
                        # Exclude the '\n' from highlighting to lex correctly
                        formatted = 'ERROR: %s output changed\n' % test
                        self.stream.write(highlightmsg(formatted, self.color))
                    self.stream.write('!')

                self.stream.flush()

    def addSuccess(self, test):
        with iolock:
            super(TestResult, self).addSuccess(test)
        self.successes.append(test)

    def addError(self, test, err):
        super(TestResult, self).addError(test, err)
        if self._options.first:
            self.stop()

    # Polyfill.
    def addSkip(self, test, reason):
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
                self.stream.flush()

    def addIgnore(self, test, reason):
        """Record a test that was deliberately not run (distinct from skip)."""
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    # count silently ignored tests so totals stay accurate
                    self.testsRun += 1
                self.stream.flush()

    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test."""
        if self.shouldStop:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        lines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                v = self._options.view
                if PYTHON3:
                    v = _bytespath(v)
                os.system(b"%s %s %s" %
                          (v, test.refpath, test.errpath))
            else:
                servefail, lines = getdiff(expected, got,
                                           test.refpath, test.errpath)
                if servefail:
                    raise test.failureException(
                        'server failed to start (HGPORT=%s)' % test._startport)
                else:
                    self.stream.write('\n')
                    for line in lines:
                        line = highlightdiff(line, self.color)
                        if PYTHON3:
                            # write raw diff bytes below the text layer
                            self.stream.flush()
                            self.stream.buffer.write(line)
                            self.stream.buffer.flush()
                        else:
                            self.stream.write(line)
                            self.stream.flush()

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                if test.readrefout() != expected:
                    self.stream.write(
                        'Reference output has changed (run again to prompt '
                        'changes)')
                else:
                    self.stream.write('Accept this change? [n] ')
                    answer = sys.stdin.readline().strip()
                    if answer.lower() in ('y', 'yes'):
                        if test.path.endswith(b'.t'):
                            rename(test.errpath, test.path)
                        else:
                            rename(test.errpath, '%s.out' % test.path)
                        accepted = True
            if not accepted:
                self.faildata[test.name] = b''.join(lines)

        return accepted

    def startTest(self, test):
        super(TestResult, self).startTest(test)

        # os.times module computes the user time and system time spent by
        # child's processes along with real elapsed time taken by a process.
        # This module has one limitation. It can only work for Linux user
        # and not for Windows.
        test.started = os.times()
        if self._firststarttime is None: # thread racy but irrelevant
            self._firststarttime = test.started[4]

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)

        test.stopped = os.times()

        starttime = test.started
        endtime = test.stopped
        origin = self._firststarttime
        self.times.append((test.name,
                           endtime[2] - starttime[2], # user space CPU time
                           endtime[3] - starttime[3], # sys space CPU time
                           endtime[4] - starttime[4], # real time
                           starttime[4] - origin, # start date in run context
                           endtime[4] - origin, # end date in run context
                           ))

        if interrupted:
            with iolock:
                self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
                    test.name, self.times[-1][3]))
1814
1817
class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
                 retest=False, keywords=None, loop=False, runs_per_test=1,
                 loadtest=None, showchannels=False,
                 *args, **kwargs):
        """Create a new instance that can run tests with a configuration.

        testdir is the directory tests execute from (typically the
        ``tests`` directory of the Mercurial source tree).

        jobs is how many tests may run concurrently, each on its own
        thread; the tests themselves spawn subprocesses, so shared state
        mutation is not a concern. With a single job the main thread is
        used.

        whitelist and blacklist name tests to force-run or to skip. They
        arguably belong in whatever populates the suite with tests, but
        live here to preserve the historical behavior of reporting skipped
        tests as part of the results.

        retest requests rerunning only previously failed tests, and
        keywords filters tests by substring match; both also arguably
        belong outside TestSuite.

        loop makes the suite cycle over its tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # Apply the various test filters here instead of inside Test; that
        # keeps the per-test running logic simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:
            def get():
                # Hand out a fresh instance when the test wants reloading.
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test, num_tests[0])
                return test
            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            # whitelisted tests bypass the blacklist/retest/keyword filters
            if not (self._whitelist and test.bname in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    with open(test.path, 'rb') as fp:
                        haystack = fp.read().lower() + test.bname.lower()
                    ignored = False
                    for needle in self._keywords.lower().split():
                        if needle not in haystack:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        # One output slot per job; a non-empty slot marks a busy channel.
        channels = [""] * self._jobs

        def job(test, result):
            # Claim the first free output channel for this worker.
            for slot, owner in enumerate(channels):
                if not owner:
                    channel = slot
                    break
            else:
                raise ValueError('Could not find output channel')
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except: # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            finally:
                try:
                    channels[channel] = ''
                except IndexError:
                    pass

        def stat():
            # Periodically render a one-line progress display of all
            # channels until run() clears the channel list.
            count = 0
            while channels:
                display = '\n%03s ' % count
                for slot, owner in enumerate(channels):
                    if owner:
                        display += owner[0]
                        channels[slot] = owner[1:] or '.'
                    else:
                        display += ' '
                    display += ' '
                with iolock:
                    sys.stdout.write(display + ' ')
                    sys.stdout.flush()
                for _tick in xrange(10):
                    if channels:
                        time.sleep(.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(
                                self._loadtest(test, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        worker = threading.Thread(target=job, name=test.name,
                                                  args=(test, result))
                        worker.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        # Dropping the channel list tells the stat thread to exit.
        channels = []

        return result
2001
2004
# Save the most recent 5 wall-clock runtimes of each test to a
# human-readable text file named .testtimes. Tests are sorted
# alphabetically, while times for each test are listed from oldest to
# newest.
2006
2009
def loadtimes(outputdir):
    """Load saved per-test timings from ``outputdir``.

    Returns a list of ``(testname, [runtimes...])`` tuples. A missing
    timings file yields an empty list; any other IOError propagates.
    """
    times = []
    try:
        # Read the same file that savetimes() writes. This previously
        # opened b'.testtimes-' (trailing dash), a name savetimes() never
        # produces, so saved timings were silently never reloaded.
        with open(os.path.join(outputdir, b'.testtimes')) as fp:
            for line in fp:
                ts = line.split()
                times.append((ts[0], [float(t) for t in ts[1:]]))
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    return times
2018
2021
2019 def savetimes(outputdir, result):
2022 def savetimes(outputdir, result):
2020 saved = dict(loadtimes(outputdir))
2023 saved = dict(loadtimes(outputdir))
2021 maxruns = 5
2024 maxruns = 5
2022 skipped = set([str(t[0]) for t in result.skipped])
2025 skipped = set([str(t[0]) for t in result.skipped])
2023 for tdata in result.times:
2026 for tdata in result.times:
2024 test, real = tdata[0], tdata[3]
2027 test, real = tdata[0], tdata[3]
2025 if test not in skipped:
2028 if test not in skipped:
2026 ts = saved.setdefault(test, [])
2029 ts = saved.setdefault(test, [])
2027 ts.append(real)
2030 ts.append(real)
2028 ts[:] = ts[-maxruns:]
2031 ts[:] = ts[-maxruns:]
2029
2032
2030 fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
2033 fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
2031 dir=outputdir, text=True)
2034 dir=outputdir, text=True)
2032 with os.fdopen(fd, 'w') as fp:
2035 with os.fdopen(fd, 'w') as fp:
2033 for name, ts in sorted(saved.items()):
2036 for name, ts in sorted(saved.items()):
2034 fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
2037 fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
2035 timepath = os.path.join(outputdir, b'.testtimes')
2038 timepath = os.path.join(outputdir, b'.testtimes')
2036 try:
2039 try:
2037 os.unlink(timepath)
2040 os.unlink(timepath)
2038 except OSError:
2041 except OSError:
2039 pass
2042 pass
2040 try:
2043 try:
2041 os.rename(tmpname, timepath)
2044 os.rename(tmpname, timepath)
2042 except OSError:
2045 except OSError:
2043 pass
2046 pass
2044
2047
2045 class TextTestRunner(unittest.TextTestRunner):
2048 class TextTestRunner(unittest.TextTestRunner):
2046 """Custom unittest test runner that uses appropriate settings."""
2049 """Custom unittest test runner that uses appropriate settings."""
2047
2050
2048 def __init__(self, runner, *args, **kwargs):
2051 def __init__(self, runner, *args, **kwargs):
2049 super(TextTestRunner, self).__init__(*args, **kwargs)
2052 super(TextTestRunner, self).__init__(*args, **kwargs)
2050
2053
2051 self._runner = runner
2054 self._runner = runner
2052
2055
2053 def listtests(self, test):
2056 def listtests(self, test):
2054 result = TestResult(self._runner.options, self.stream,
2057 result = TestResult(self._runner.options, self.stream,
2055 self.descriptions, 0)
2058 self.descriptions, 0)
2056 test = sorted(test, key=lambda t: t.name)
2059 test = sorted(test, key=lambda t: t.name)
2057 for t in test:
2060 for t in test:
2058 print(t.name)
2061 print(t.name)
2059 result.addSuccess(t)
2062 result.addSuccess(t)
2060
2063
2061 if self._runner.options.xunit:
2064 if self._runner.options.xunit:
2062 with open(self._runner.options.xunit, "wb") as xuf:
2065 with open(self._runner.options.xunit, "wb") as xuf:
2063 self._writexunit(result, xuf)
2066 self._writexunit(result, xuf)
2064
2067
2065 if self._runner.options.json:
2068 if self._runner.options.json:
2066 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2069 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2067 with open(jsonpath, 'w') as fp:
2070 with open(jsonpath, 'w') as fp:
2068 self._writejson(result, fp)
2071 self._writejson(result, fp)
2069
2072
2070 return result
2073 return result
2071
2074
2072 def run(self, test):
2075 def run(self, test):
2073 result = TestResult(self._runner.options, self.stream,
2076 result = TestResult(self._runner.options, self.stream,
2074 self.descriptions, self.verbosity)
2077 self.descriptions, self.verbosity)
2075
2078
2076 test(result)
2079 test(result)
2077
2080
2078 failed = len(result.failures)
2081 failed = len(result.failures)
2079 skipped = len(result.skipped)
2082 skipped = len(result.skipped)
2080 ignored = len(result.ignored)
2083 ignored = len(result.ignored)
2081
2084
2082 with iolock:
2085 with iolock:
2083 self.stream.writeln('')
2086 self.stream.writeln('')
2084
2087
2085 if not self._runner.options.noskips:
2088 if not self._runner.options.noskips:
2086 for test, msg in result.skipped:
2089 for test, msg in result.skipped:
2087 formatted = 'Skipped %s: %s\n' % (test.name, msg)
2090 formatted = 'Skipped %s: %s\n' % (test.name, msg)
2088 self.stream.write(highlightmsg(formatted, result.color))
2091 self.stream.write(highlightmsg(formatted, result.color))
2089 for test, msg in result.failures:
2092 for test, msg in result.failures:
2090 formatted = 'Failed %s: %s\n' % (test.name, msg)
2093 formatted = 'Failed %s: %s\n' % (test.name, msg)
2091 self.stream.write(highlightmsg(formatted, result.color))
2094 self.stream.write(highlightmsg(formatted, result.color))
2092 for test, msg in result.errors:
2095 for test, msg in result.errors:
2093 self.stream.writeln('Errored %s: %s' % (test.name, msg))
2096 self.stream.writeln('Errored %s: %s' % (test.name, msg))
2094
2097
2095 if self._runner.options.xunit:
2098 if self._runner.options.xunit:
2096 with open(self._runner.options.xunit, "wb") as xuf:
2099 with open(self._runner.options.xunit, "wb") as xuf:
2097 self._writexunit(result, xuf)
2100 self._writexunit(result, xuf)
2098
2101
2099 if self._runner.options.json:
2102 if self._runner.options.json:
2100 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2103 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2101 with open(jsonpath, 'w') as fp:
2104 with open(jsonpath, 'w') as fp:
2102 self._writejson(result, fp)
2105 self._writejson(result, fp)
2103
2106
2104 self._runner._checkhglib('Tested')
2107 self._runner._checkhglib('Tested')
2105
2108
2106 savetimes(self._runner._outputdir, result)
2109 savetimes(self._runner._outputdir, result)
2107
2110
2108 if failed and self._runner.options.known_good_rev:
2111 if failed and self._runner.options.known_good_rev:
2109 self._bisecttests(t for t, m in result.failures)
2112 self._bisecttests(t for t, m in result.failures)
2110 self.stream.writeln(
2113 self.stream.writeln(
2111 '# Ran %d tests, %d skipped, %d failed.'
2114 '# Ran %d tests, %d skipped, %d failed.'
2112 % (result.testsRun, skipped + ignored, failed))
2115 % (result.testsRun, skipped + ignored, failed))
2113 if failed:
2116 if failed:
2114 self.stream.writeln('python hash seed: %s' %
2117 self.stream.writeln('python hash seed: %s' %
2115 os.environ['PYTHONHASHSEED'])
2118 os.environ['PYTHONHASHSEED'])
2116 if self._runner.options.time:
2119 if self._runner.options.time:
2117 self.printtimes(result.times)
2120 self.printtimes(result.times)
2118
2121
2119 if self._runner.options.exceptions:
2122 if self._runner.options.exceptions:
2120 exceptions = aggregateexceptions(
2123 exceptions = aggregateexceptions(
2121 os.path.join(self._runner._outputdir, b'exceptions'))
2124 os.path.join(self._runner._outputdir, b'exceptions'))
2122 total = sum(exceptions.values())
2125 total = sum(exceptions.values())
2123
2126
2124 self.stream.writeln('Exceptions Report:')
2127 self.stream.writeln('Exceptions Report:')
2125 self.stream.writeln('%d total from %d frames' %
2128 self.stream.writeln('%d total from %d frames' %
2126 (total, len(exceptions)))
2129 (total, len(exceptions)))
2127 for (frame, line, exc), count in exceptions.most_common():
2130 for (frame, line, exc), count in exceptions.most_common():
2128 self.stream.writeln('%d\t%s: %s' % (count, frame, exc))
2131 self.stream.writeln('%d\t%s: %s' % (count, frame, exc))
2129
2132
2130 self.stream.flush()
2133 self.stream.flush()
2131
2134
2132 return result
2135 return result
2133
2136
2134 def _bisecttests(self, tests):
2137 def _bisecttests(self, tests):
2135 bisectcmd = ['hg', 'bisect']
2138 bisectcmd = ['hg', 'bisect']
2136 bisectrepo = self._runner.options.bisect_repo
2139 bisectrepo = self._runner.options.bisect_repo
2137 if bisectrepo:
2140 if bisectrepo:
2138 bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
2141 bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
2139 def pread(args):
2142 def pread(args):
2140 env = os.environ.copy()
2143 env = os.environ.copy()
2141 env['HGPLAIN'] = '1'
2144 env['HGPLAIN'] = '1'
2142 p = subprocess.Popen(args, stderr=subprocess.STDOUT,
2145 p = subprocess.Popen(args, stderr=subprocess.STDOUT,
2143 stdout=subprocess.PIPE, env=env)
2146 stdout=subprocess.PIPE, env=env)
2144 data = p.stdout.read()
2147 data = p.stdout.read()
2145 p.wait()
2148 p.wait()
2146 return data
2149 return data
2147 for test in tests:
2150 for test in tests:
2148 pread(bisectcmd + ['--reset']),
2151 pread(bisectcmd + ['--reset']),
2149 pread(bisectcmd + ['--bad', '.'])
2152 pread(bisectcmd + ['--bad', '.'])
2150 pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
2153 pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
2151 # TODO: we probably need to forward more options
2154 # TODO: we probably need to forward more options
2152 # that alter hg's behavior inside the tests.
2155 # that alter hg's behavior inside the tests.
2153 opts = ''
2156 opts = ''
2154 withhg = self._runner.options.with_hg
2157 withhg = self._runner.options.with_hg
2155 if withhg:
2158 if withhg:
2156 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
2159 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
2157 rtc = '%s %s %s %s' % (sys.executable, sys.argv[0], opts,
2160 rtc = '%s %s %s %s' % (sys.executable, sys.argv[0], opts,
2158 test)
2161 test)
2159 data = pread(bisectcmd + ['--command', rtc])
2162 data = pread(bisectcmd + ['--command', rtc])
2160 m = re.search(
2163 m = re.search(
2161 (br'\nThe first (?P<goodbad>bad|good) revision '
2164 (br'\nThe first (?P<goodbad>bad|good) revision '
2162 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
2165 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
2163 br'summary: +(?P<summary>[^\n]+)\n'),
2166 br'summary: +(?P<summary>[^\n]+)\n'),
2164 data, (re.MULTILINE | re.DOTALL))
2167 data, (re.MULTILINE | re.DOTALL))
2165 if m is None:
2168 if m is None:
2166 self.stream.writeln(
2169 self.stream.writeln(
2167 'Failed to identify failure point for %s' % test)
2170 'Failed to identify failure point for %s' % test)
2168 continue
2171 continue
2169 dat = m.groupdict()
2172 dat = m.groupdict()
2170 verb = 'broken' if dat['goodbad'] == 'bad' else 'fixed'
2173 verb = 'broken' if dat['goodbad'] == 'bad' else 'fixed'
2171 self.stream.writeln(
2174 self.stream.writeln(
2172 '%s %s by %s (%s)' % (
2175 '%s %s by %s (%s)' % (
2173 test, verb, dat['node'], dat['summary']))
2176 test, verb, dat['node'], dat['summary']))
2174
2177
2175 def printtimes(self, times):
2178 def printtimes(self, times):
2176 # iolock held by run
2179 # iolock held by run
2177 self.stream.writeln('# Producing time report')
2180 self.stream.writeln('# Producing time report')
2178 times.sort(key=lambda t: (t[3]))
2181 times.sort(key=lambda t: (t[3]))
2179 cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
2182 cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
2180 self.stream.writeln('%-7s %-7s %-7s %-7s %-7s %s' %
2183 self.stream.writeln('%-7s %-7s %-7s %-7s %-7s %s' %
2181 ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
2184 ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
2182 for tdata in times:
2185 for tdata in times:
2183 test = tdata[0]
2186 test = tdata[0]
2184 cuser, csys, real, start, end = tdata[1:6]
2187 cuser, csys, real, start, end = tdata[1:6]
2185 self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2188 self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2186
2189
2187 @staticmethod
2190 @staticmethod
2188 def _writexunit(result, outf):
2191 def _writexunit(result, outf):
2189 # See http://llg.cubic.org/docs/junit/ for a reference.
2192 # See http://llg.cubic.org/docs/junit/ for a reference.
2190 timesd = dict((t[0], t[3]) for t in result.times)
2193 timesd = dict((t[0], t[3]) for t in result.times)
2191 doc = minidom.Document()
2194 doc = minidom.Document()
2192 s = doc.createElement('testsuite')
2195 s = doc.createElement('testsuite')
2193 s.setAttribute('name', 'run-tests')
2196 s.setAttribute('name', 'run-tests')
2194 s.setAttribute('tests', str(result.testsRun))
2197 s.setAttribute('tests', str(result.testsRun))
2195 s.setAttribute('errors', "0") # TODO
2198 s.setAttribute('errors', "0") # TODO
2196 s.setAttribute('failures', str(len(result.failures)))
2199 s.setAttribute('failures', str(len(result.failures)))
2197 s.setAttribute('skipped', str(len(result.skipped) +
2200 s.setAttribute('skipped', str(len(result.skipped) +
2198 len(result.ignored)))
2201 len(result.ignored)))
2199 doc.appendChild(s)
2202 doc.appendChild(s)
2200 for tc in result.successes:
2203 for tc in result.successes:
2201 t = doc.createElement('testcase')
2204 t = doc.createElement('testcase')
2202 t.setAttribute('name', tc.name)
2205 t.setAttribute('name', tc.name)
2203 tctime = timesd.get(tc.name)
2206 tctime = timesd.get(tc.name)
2204 if tctime is not None:
2207 if tctime is not None:
2205 t.setAttribute('time', '%.3f' % tctime)
2208 t.setAttribute('time', '%.3f' % tctime)
2206 s.appendChild(t)
2209 s.appendChild(t)
2207 for tc, err in sorted(result.faildata.items()):
2210 for tc, err in sorted(result.faildata.items()):
2208 t = doc.createElement('testcase')
2211 t = doc.createElement('testcase')
2209 t.setAttribute('name', tc)
2212 t.setAttribute('name', tc)
2210 tctime = timesd.get(tc)
2213 tctime = timesd.get(tc)
2211 if tctime is not None:
2214 if tctime is not None:
2212 t.setAttribute('time', '%.3f' % tctime)
2215 t.setAttribute('time', '%.3f' % tctime)
2213 # createCDATASection expects a unicode or it will
2216 # createCDATASection expects a unicode or it will
2214 # convert using default conversion rules, which will
2217 # convert using default conversion rules, which will
2215 # fail if string isn't ASCII.
2218 # fail if string isn't ASCII.
2216 err = cdatasafe(err).decode('utf-8', 'replace')
2219 err = cdatasafe(err).decode('utf-8', 'replace')
2217 cd = doc.createCDATASection(err)
2220 cd = doc.createCDATASection(err)
2218 # Use 'failure' here instead of 'error' to match errors = 0,
2221 # Use 'failure' here instead of 'error' to match errors = 0,
2219 # failures = len(result.failures) in the testsuite element.
2222 # failures = len(result.failures) in the testsuite element.
2220 failelem = doc.createElement('failure')
2223 failelem = doc.createElement('failure')
2221 failelem.setAttribute('message', 'output changed')
2224 failelem.setAttribute('message', 'output changed')
2222 failelem.setAttribute('type', 'output-mismatch')
2225 failelem.setAttribute('type', 'output-mismatch')
2223 failelem.appendChild(cd)
2226 failelem.appendChild(cd)
2224 t.appendChild(failelem)
2227 t.appendChild(failelem)
2225 s.appendChild(t)
2228 s.appendChild(t)
2226 for tc, message in result.skipped:
2229 for tc, message in result.skipped:
2227 # According to the schema, 'skipped' has no attributes. So store
2230 # According to the schema, 'skipped' has no attributes. So store
2228 # the skip message as a text node instead.
2231 # the skip message as a text node instead.
2229 t = doc.createElement('testcase')
2232 t = doc.createElement('testcase')
2230 t.setAttribute('name', tc.name)
2233 t.setAttribute('name', tc.name)
2231 binmessage = message.encode('utf-8')
2234 binmessage = message.encode('utf-8')
2232 message = cdatasafe(binmessage).decode('utf-8', 'replace')
2235 message = cdatasafe(binmessage).decode('utf-8', 'replace')
2233 cd = doc.createCDATASection(message)
2236 cd = doc.createCDATASection(message)
2234 skipelem = doc.createElement('skipped')
2237 skipelem = doc.createElement('skipped')
2235 skipelem.appendChild(cd)
2238 skipelem.appendChild(cd)
2236 t.appendChild(skipelem)
2239 t.appendChild(skipelem)
2237 s.appendChild(t)
2240 s.appendChild(t)
2238 outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
2241 outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
2239
2242
2240 @staticmethod
2243 @staticmethod
2241 def _writejson(result, outf):
2244 def _writejson(result, outf):
2242 timesd = {}
2245 timesd = {}
2243 for tdata in result.times:
2246 for tdata in result.times:
2244 test = tdata[0]
2247 test = tdata[0]
2245 timesd[test] = tdata[1:]
2248 timesd[test] = tdata[1:]
2246
2249
2247 outcome = {}
2250 outcome = {}
2248 groups = [('success', ((tc, None)
2251 groups = [('success', ((tc, None)
2249 for tc in result.successes)),
2252 for tc in result.successes)),
2250 ('failure', result.failures),
2253 ('failure', result.failures),
2251 ('skip', result.skipped)]
2254 ('skip', result.skipped)]
2252 for res, testcases in groups:
2255 for res, testcases in groups:
2253 for tc, __ in testcases:
2256 for tc, __ in testcases:
2254 if tc.name in timesd:
2257 if tc.name in timesd:
2255 diff = result.faildata.get(tc.name, b'')
2258 diff = result.faildata.get(tc.name, b'')
2256 try:
2259 try:
2257 diff = diff.decode('unicode_escape')
2260 diff = diff.decode('unicode_escape')
2258 except UnicodeDecodeError as e:
2261 except UnicodeDecodeError as e:
2259 diff = '%r decoding diff, sorry' % e
2262 diff = '%r decoding diff, sorry' % e
2260 tres = {'result': res,
2263 tres = {'result': res,
2261 'time': ('%0.3f' % timesd[tc.name][2]),
2264 'time': ('%0.3f' % timesd[tc.name][2]),
2262 'cuser': ('%0.3f' % timesd[tc.name][0]),
2265 'cuser': ('%0.3f' % timesd[tc.name][0]),
2263 'csys': ('%0.3f' % timesd[tc.name][1]),
2266 'csys': ('%0.3f' % timesd[tc.name][1]),
2264 'start': ('%0.3f' % timesd[tc.name][3]),
2267 'start': ('%0.3f' % timesd[tc.name][3]),
2265 'end': ('%0.3f' % timesd[tc.name][4]),
2268 'end': ('%0.3f' % timesd[tc.name][4]),
2266 'diff': diff,
2269 'diff': diff,
2267 }
2270 }
2268 else:
2271 else:
2269 # blacklisted test
2272 # blacklisted test
2270 tres = {'result': res}
2273 tres = {'result': res}
2271
2274
2272 outcome[tc.name] = tres
2275 outcome[tc.name] = tres
2273 jsonout = json.dumps(outcome, sort_keys=True, indent=4,
2276 jsonout = json.dumps(outcome, sort_keys=True, indent=4,
2274 separators=(',', ': '))
2277 separators=(',', ': '))
2275 outf.writelines(("testreport =", jsonout))
2278 outf.writelines(("testreport =", jsonout))
2276
2279
2277 class TestRunner(object):
2280 class TestRunner(object):
2278 """Holds context for executing tests.
2281 """Holds context for executing tests.
2279
2282
2280 Tests rely on a lot of state. This object holds it for them.
2283 Tests rely on a lot of state. This object holds it for them.
2281 """
2284 """
2282
2285
2283 # Programs required to run tests.
2286 # Programs required to run tests.
2284 REQUIREDTOOLS = [
2287 REQUIREDTOOLS = [
2285 b'diff',
2288 b'diff',
2286 b'grep',
2289 b'grep',
2287 b'unzip',
2290 b'unzip',
2288 b'gunzip',
2291 b'gunzip',
2289 b'bunzip2',
2292 b'bunzip2',
2290 b'sed',
2293 b'sed',
2291 ]
2294 ]
2292
2295
2293 # Maps file extensions to test class.
2296 # Maps file extensions to test class.
2294 TESTTYPES = [
2297 TESTTYPES = [
2295 (b'.py', PythonTest),
2298 (b'.py', PythonTest),
2296 (b'.t', TTest),
2299 (b'.t', TTest),
2297 ]
2300 ]
2298
2301
2299 def __init__(self):
2302 def __init__(self):
2300 self.options = None
2303 self.options = None
2301 self._hgroot = None
2304 self._hgroot = None
2302 self._testdir = None
2305 self._testdir = None
2303 self._outputdir = None
2306 self._outputdir = None
2304 self._hgtmp = None
2307 self._hgtmp = None
2305 self._installdir = None
2308 self._installdir = None
2306 self._bindir = None
2309 self._bindir = None
2307 self._tmpbinddir = None
2310 self._tmpbinddir = None
2308 self._pythondir = None
2311 self._pythondir = None
2309 self._coveragefile = None
2312 self._coveragefile = None
2310 self._createdfiles = []
2313 self._createdfiles = []
2311 self._hgcommand = None
2314 self._hgcommand = None
2312 self._hgpath = None
2315 self._hgpath = None
2313 self._portoffset = 0
2316 self._portoffset = 0
2314 self._ports = {}
2317 self._ports = {}
2315
2318
2316 def run(self, args, parser=None):
2319 def run(self, args, parser=None):
2317 """Run the test suite."""
2320 """Run the test suite."""
2318 oldmask = os.umask(0o22)
2321 oldmask = os.umask(0o22)
2319 try:
2322 try:
2320 parser = parser or getparser()
2323 parser = parser or getparser()
2321 options = parseargs(args, parser)
2324 options = parseargs(args, parser)
2322 tests = [_bytespath(a) for a in options.tests]
2325 tests = [_bytespath(a) for a in options.tests]
2323 if options.test_list is not None:
2326 if options.test_list is not None:
2324 for listfile in options.test_list:
2327 for listfile in options.test_list:
2325 with open(listfile, 'rb') as f:
2328 with open(listfile, 'rb') as f:
2326 tests.extend(t for t in f.read().splitlines() if t)
2329 tests.extend(t for t in f.read().splitlines() if t)
2327 self.options = options
2330 self.options = options
2328
2331
2329 self._checktools()
2332 self._checktools()
2330 testdescs = self.findtests(tests)
2333 testdescs = self.findtests(tests)
2331 if options.profile_runner:
2334 if options.profile_runner:
2332 import statprof
2335 import statprof
2333 statprof.start()
2336 statprof.start()
2334 result = self._run(testdescs)
2337 result = self._run(testdescs)
2335 if options.profile_runner:
2338 if options.profile_runner:
2336 statprof.stop()
2339 statprof.stop()
2337 statprof.display()
2340 statprof.display()
2338 return result
2341 return result
2339
2342
2340 finally:
2343 finally:
2341 os.umask(oldmask)
2344 os.umask(oldmask)
2342
2345
2343 def _run(self, testdescs):
2346 def _run(self, testdescs):
2344 if self.options.random:
2347 if self.options.random:
2345 random.shuffle(testdescs)
2348 random.shuffle(testdescs)
2346 else:
2349 else:
2347 # keywords for slow tests
2350 # keywords for slow tests
2348 slow = {b'svn': 10,
2351 slow = {b'svn': 10,
2349 b'cvs': 10,
2352 b'cvs': 10,
2350 b'hghave': 10,
2353 b'hghave': 10,
2351 b'largefiles-update': 10,
2354 b'largefiles-update': 10,
2352 b'run-tests': 10,
2355 b'run-tests': 10,
2353 b'corruption': 10,
2356 b'corruption': 10,
2354 b'race': 10,
2357 b'race': 10,
2355 b'i18n': 10,
2358 b'i18n': 10,
2356 b'check': 100,
2359 b'check': 100,
2357 b'gendoc': 100,
2360 b'gendoc': 100,
2358 b'contrib-perf': 200,
2361 b'contrib-perf': 200,
2359 }
2362 }
2360 perf = {}
2363 perf = {}
2361 def sortkey(f):
2364 def sortkey(f):
2362 # run largest tests first, as they tend to take the longest
2365 # run largest tests first, as they tend to take the longest
2363 f = f['path']
2366 f = f['path']
2364 try:
2367 try:
2365 return perf[f]
2368 return perf[f]
2366 except KeyError:
2369 except KeyError:
2367 try:
2370 try:
2368 val = -os.stat(f).st_size
2371 val = -os.stat(f).st_size
2369 except OSError as e:
2372 except OSError as e:
2370 if e.errno != errno.ENOENT:
2373 if e.errno != errno.ENOENT:
2371 raise
2374 raise
2372 perf[f] = -1e9 # file does not exist, tell early
2375 perf[f] = -1e9 # file does not exist, tell early
2373 return -1e9
2376 return -1e9
2374 for kw, mul in slow.items():
2377 for kw, mul in slow.items():
2375 if kw in f:
2378 if kw in f:
2376 val *= mul
2379 val *= mul
2377 if f.endswith(b'.py'):
2380 if f.endswith(b'.py'):
2378 val /= 10.0
2381 val /= 10.0
2379 perf[f] = val / 1000.0
2382 perf[f] = val / 1000.0
2380 return perf[f]
2383 return perf[f]
2381 testdescs.sort(key=sortkey)
2384 testdescs.sort(key=sortkey)
2382
2385
2383 self._testdir = osenvironb[b'TESTDIR'] = getattr(
2386 self._testdir = osenvironb[b'TESTDIR'] = getattr(
2384 os, 'getcwdb', os.getcwd)()
2387 os, 'getcwdb', os.getcwd)()
2385 # assume all tests in same folder for now
2388 # assume all tests in same folder for now
2386 if testdescs:
2389 if testdescs:
2387 pathname = os.path.dirname(testdescs[0]['path'])
2390 pathname = os.path.dirname(testdescs[0]['path'])
2388 if pathname:
2391 if pathname:
2389 osenvironb[b'TESTDIR'] = os.path.join(osenvironb[b'TESTDIR'],
2392 osenvironb[b'TESTDIR'] = os.path.join(osenvironb[b'TESTDIR'],
2390 pathname)
2393 pathname)
2391 if self.options.outputdir:
2394 if self.options.outputdir:
2392 self._outputdir = canonpath(_bytespath(self.options.outputdir))
2395 self._outputdir = canonpath(_bytespath(self.options.outputdir))
2393 else:
2396 else:
2394 self._outputdir = self._testdir
2397 self._outputdir = self._testdir
2395 if testdescs and pathname:
2398 if testdescs and pathname:
2396 self._outputdir = os.path.join(self._outputdir, pathname)
2399 self._outputdir = os.path.join(self._outputdir, pathname)
2397
2400
2398 if 'PYTHONHASHSEED' not in os.environ:
2401 if 'PYTHONHASHSEED' not in os.environ:
2399 # use a random python hash seed all the time
2402 # use a random python hash seed all the time
2400 # we do the randomness ourself to know what seed is used
2403 # we do the randomness ourself to know what seed is used
2401 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2404 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2402
2405
2403 if self.options.tmpdir:
2406 if self.options.tmpdir:
2404 self.options.keep_tmpdir = True
2407 self.options.keep_tmpdir = True
2405 tmpdir = _bytespath(self.options.tmpdir)
2408 tmpdir = _bytespath(self.options.tmpdir)
2406 if os.path.exists(tmpdir):
2409 if os.path.exists(tmpdir):
2407 # Meaning of tmpdir has changed since 1.3: we used to create
2410 # Meaning of tmpdir has changed since 1.3: we used to create
2408 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2411 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2409 # tmpdir already exists.
2412 # tmpdir already exists.
2410 print("error: temp dir %r already exists" % tmpdir)
2413 print("error: temp dir %r already exists" % tmpdir)
2411 return 1
2414 return 1
2412
2415
2413 # Automatically removing tmpdir sounds convenient, but could
2416 # Automatically removing tmpdir sounds convenient, but could
2414 # really annoy anyone in the habit of using "--tmpdir=/tmp"
2417 # really annoy anyone in the habit of using "--tmpdir=/tmp"
2415 # or "--tmpdir=$HOME".
2418 # or "--tmpdir=$HOME".
2416 #vlog("# Removing temp dir", tmpdir)
2419 #vlog("# Removing temp dir", tmpdir)
2417 #shutil.rmtree(tmpdir)
2420 #shutil.rmtree(tmpdir)
2418 os.makedirs(tmpdir)
2421 os.makedirs(tmpdir)
2419 else:
2422 else:
2420 d = None
2423 d = None
2421 if os.name == 'nt':
2424 if os.name == 'nt':
2422 # without this, we get the default temp dir location, but
2425 # without this, we get the default temp dir location, but
2423 # in all lowercase, which causes troubles with paths (issue3490)
2426 # in all lowercase, which causes troubles with paths (issue3490)
2424 d = osenvironb.get(b'TMP', None)
2427 d = osenvironb.get(b'TMP', None)
2425 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
2428 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
2426
2429
2427 self._hgtmp = osenvironb[b'HGTMP'] = (
2430 self._hgtmp = osenvironb[b'HGTMP'] = (
2428 os.path.realpath(tmpdir))
2431 os.path.realpath(tmpdir))
2429
2432
2430 if self.options.with_hg:
2433 if self.options.with_hg:
2431 self._installdir = None
2434 self._installdir = None
2432 whg = self.options.with_hg
2435 whg = self.options.with_hg
2433 self._bindir = os.path.dirname(os.path.realpath(whg))
2436 self._bindir = os.path.dirname(os.path.realpath(whg))
2434 assert isinstance(self._bindir, bytes)
2437 assert isinstance(self._bindir, bytes)
2435 self._hgcommand = os.path.basename(whg)
2438 self._hgcommand = os.path.basename(whg)
2436 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
2439 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
2437 os.makedirs(self._tmpbindir)
2440 os.makedirs(self._tmpbindir)
2438
2441
2439 # This looks redundant with how Python initializes sys.path from
2442 # This looks redundant with how Python initializes sys.path from
2440 # the location of the script being executed. Needed because the
2443 # the location of the script being executed. Needed because the
2441 # "hg" specified by --with-hg is not the only Python script
2444 # "hg" specified by --with-hg is not the only Python script
2442 # executed in the test suite that needs to import 'mercurial'
2445 # executed in the test suite that needs to import 'mercurial'
2443 # ... which means it's not really redundant at all.
2446 # ... which means it's not really redundant at all.
2444 self._pythondir = self._bindir
2447 self._pythondir = self._bindir
2445 else:
2448 else:
2446 self._installdir = os.path.join(self._hgtmp, b"install")
2449 self._installdir = os.path.join(self._hgtmp, b"install")
2447 self._bindir = os.path.join(self._installdir, b"bin")
2450 self._bindir = os.path.join(self._installdir, b"bin")
2448 self._hgcommand = b'hg'
2451 self._hgcommand = b'hg'
2449 self._tmpbindir = self._bindir
2452 self._tmpbindir = self._bindir
2450 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
2453 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
2451
2454
2452 # set CHGHG, then replace "hg" command by "chg"
2455 # set CHGHG, then replace "hg" command by "chg"
2453 chgbindir = self._bindir
2456 chgbindir = self._bindir
2454 if self.options.chg or self.options.with_chg:
2457 if self.options.chg or self.options.with_chg:
2455 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
2458 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
2456 else:
2459 else:
2457 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
2460 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
2458 if self.options.chg:
2461 if self.options.chg:
2459 self._hgcommand = b'chg'
2462 self._hgcommand = b'chg'
2460 elif self.options.with_chg:
2463 elif self.options.with_chg:
2461 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
2464 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
2462 self._hgcommand = os.path.basename(self.options.with_chg)
2465 self._hgcommand = os.path.basename(self.options.with_chg)
2463
2466
2464 osenvironb[b"BINDIR"] = self._bindir
2467 osenvironb[b"BINDIR"] = self._bindir
2465 osenvironb[b"PYTHON"] = PYTHON
2468 osenvironb[b"PYTHON"] = PYTHON
2466
2469
2467 if self.options.with_python3:
2470 if self.options.with_python3:
2468 osenvironb[b'PYTHON3'] = self.options.with_python3
2471 osenvironb[b'PYTHON3'] = self.options.with_python3
2469
2472
2470 fileb = _bytespath(__file__)
2473 fileb = _bytespath(__file__)
2471 runtestdir = os.path.abspath(os.path.dirname(fileb))
2474 runtestdir = os.path.abspath(os.path.dirname(fileb))
2472 osenvironb[b'RUNTESTDIR'] = runtestdir
2475 osenvironb[b'RUNTESTDIR'] = runtestdir
2473 if PYTHON3:
2476 if PYTHON3:
2474 sepb = _bytespath(os.pathsep)
2477 sepb = _bytespath(os.pathsep)
2475 else:
2478 else:
2476 sepb = os.pathsep
2479 sepb = os.pathsep
2477 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
2480 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
2478 if os.path.islink(__file__):
2481 if os.path.islink(__file__):
2479 # test helper will likely be at the end of the symlink
2482 # test helper will likely be at the end of the symlink
2480 realfile = os.path.realpath(fileb)
2483 realfile = os.path.realpath(fileb)
2481 realdir = os.path.abspath(os.path.dirname(realfile))
2484 realdir = os.path.abspath(os.path.dirname(realfile))
2482 path.insert(2, realdir)
2485 path.insert(2, realdir)
2483 if chgbindir != self._bindir:
2486 if chgbindir != self._bindir:
2484 path.insert(1, chgbindir)
2487 path.insert(1, chgbindir)
2485 if self._testdir != runtestdir:
2488 if self._testdir != runtestdir:
2486 path = [self._testdir] + path
2489 path = [self._testdir] + path
2487 if self._tmpbindir != self._bindir:
2490 if self._tmpbindir != self._bindir:
2488 path = [self._tmpbindir] + path
2491 path = [self._tmpbindir] + path
2489 osenvironb[b"PATH"] = sepb.join(path)
2492 osenvironb[b"PATH"] = sepb.join(path)
2490
2493
2491 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
2494 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
2492 # can run .../tests/run-tests.py test-foo where test-foo
2495 # can run .../tests/run-tests.py test-foo where test-foo
2493 # adds an extension to HGRC. Also include run-test.py directory to
2496 # adds an extension to HGRC. Also include run-test.py directory to
2494 # import modules like heredoctest.
2497 # import modules like heredoctest.
2495 pypath = [self._pythondir, self._testdir, runtestdir]
2498 pypath = [self._pythondir, self._testdir, runtestdir]
2496 # We have to augment PYTHONPATH, rather than simply replacing
2499 # We have to augment PYTHONPATH, rather than simply replacing
2497 # it, in case external libraries are only available via current
2500 # it, in case external libraries are only available via current
2498 # PYTHONPATH. (In particular, the Subversion bindings on OS X
2501 # PYTHONPATH. (In particular, the Subversion bindings on OS X
2499 # are in /opt/subversion.)
2502 # are in /opt/subversion.)
2500 oldpypath = osenvironb.get(IMPL_PATH)
2503 oldpypath = osenvironb.get(IMPL_PATH)
2501 if oldpypath:
2504 if oldpypath:
2502 pypath.append(oldpypath)
2505 pypath.append(oldpypath)
2503 osenvironb[IMPL_PATH] = sepb.join(pypath)
2506 osenvironb[IMPL_PATH] = sepb.join(pypath)
2504
2507
2505 if self.options.pure:
2508 if self.options.pure:
2506 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
2509 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
2507 os.environ["HGMODULEPOLICY"] = "py"
2510 os.environ["HGMODULEPOLICY"] = "py"
2508
2511
2509 if self.options.allow_slow_tests:
2512 if self.options.allow_slow_tests:
2510 os.environ["HGTEST_SLOW"] = "slow"
2513 os.environ["HGTEST_SLOW"] = "slow"
2511 elif 'HGTEST_SLOW' in os.environ:
2514 elif 'HGTEST_SLOW' in os.environ:
2512 del os.environ['HGTEST_SLOW']
2515 del os.environ['HGTEST_SLOW']
2513
2516
2514 self._coveragefile = os.path.join(self._testdir, b'.coverage')
2517 self._coveragefile = os.path.join(self._testdir, b'.coverage')
2515
2518
2516 if self.options.exceptions:
2519 if self.options.exceptions:
2517 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
2520 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
2518 try:
2521 try:
2519 os.makedirs(exceptionsdir)
2522 os.makedirs(exceptionsdir)
2520 except OSError as e:
2523 except OSError as e:
2521 if e.errno != errno.EEXIST:
2524 if e.errno != errno.EEXIST:
2522 raise
2525 raise
2523
2526
2524 # Remove all existing exception reports.
2527 # Remove all existing exception reports.
2525 for f in os.listdir(exceptionsdir):
2528 for f in os.listdir(exceptionsdir):
2526 os.unlink(os.path.join(exceptionsdir, f))
2529 os.unlink(os.path.join(exceptionsdir, f))
2527
2530
2528 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
2531 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
2529 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
2532 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
2530 self.options.extra_config_opt.append(
2533 self.options.extra_config_opt.append(
2531 'extensions.logexceptions=%s' % logexceptions.decode('utf-8'))
2534 'extensions.logexceptions=%s' % logexceptions.decode('utf-8'))
2532
2535
2533 vlog("# Using TESTDIR", self._testdir)
2536 vlog("# Using TESTDIR", self._testdir)
2534 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
2537 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
2535 vlog("# Using HGTMP", self._hgtmp)
2538 vlog("# Using HGTMP", self._hgtmp)
2536 vlog("# Using PATH", os.environ["PATH"])
2539 vlog("# Using PATH", os.environ["PATH"])
2537 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
2540 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
2538 vlog("# Writing to directory", self._outputdir)
2541 vlog("# Writing to directory", self._outputdir)
2539
2542
2540 try:
2543 try:
2541 return self._runtests(testdescs) or 0
2544 return self._runtests(testdescs) or 0
2542 finally:
2545 finally:
2543 time.sleep(.1)
2546 time.sleep(.1)
2544 self._cleanup()
2547 self._cleanup()
2545
2548
2546 def findtests(self, args):
2549 def findtests(self, args):
2547 """Finds possible test files from arguments.
2550 """Finds possible test files from arguments.
2548
2551
2549 If you wish to inject custom tests into the test harness, this would
2552 If you wish to inject custom tests into the test harness, this would
2550 be a good function to monkeypatch or override in a derived class.
2553 be a good function to monkeypatch or override in a derived class.
2551 """
2554 """
2552 if not args:
2555 if not args:
2553 if self.options.changed:
2556 if self.options.changed:
2554 proc = Popen4('hg st --rev "%s" -man0 .' %
2557 proc = Popen4('hg st --rev "%s" -man0 .' %
2555 self.options.changed, None, 0)
2558 self.options.changed, None, 0)
2556 stdout, stderr = proc.communicate()
2559 stdout, stderr = proc.communicate()
2557 args = stdout.strip(b'\0').split(b'\0')
2560 args = stdout.strip(b'\0').split(b'\0')
2558 else:
2561 else:
2559 args = os.listdir(b'.')
2562 args = os.listdir(b'.')
2560
2563
2561 expanded_args = []
2564 expanded_args = []
2562 for arg in args:
2565 for arg in args:
2563 if os.path.isdir(arg):
2566 if os.path.isdir(arg):
2564 if not arg.endswith(b'/'):
2567 if not arg.endswith(b'/'):
2565 arg += b'/'
2568 arg += b'/'
2566 expanded_args.extend([arg + a for a in os.listdir(arg)])
2569 expanded_args.extend([arg + a for a in os.listdir(arg)])
2567 else:
2570 else:
2568 expanded_args.append(arg)
2571 expanded_args.append(arg)
2569 args = expanded_args
2572 args = expanded_args
2570
2573
2571 tests = []
2574 tests = []
2572 for t in args:
2575 for t in args:
2573 if not (os.path.basename(t).startswith(b'test-')
2576 if not (os.path.basename(t).startswith(b'test-')
2574 and (t.endswith(b'.py') or t.endswith(b'.t'))):
2577 and (t.endswith(b'.py') or t.endswith(b'.t'))):
2575 continue
2578 continue
2576 if t.endswith(b'.t'):
2579 if t.endswith(b'.t'):
2577 # .t file may contain multiple test cases
2580 # .t file may contain multiple test cases
2578 cases = sorted(parsettestcases(t))
2581 cases = sorted(parsettestcases(t))
2579 if cases:
2582 if cases:
2580 tests += [{'path': t, 'case': c} for c in sorted(cases)]
2583 tests += [{'path': t, 'case': c} for c in sorted(cases)]
2581 else:
2584 else:
2582 tests.append({'path': t})
2585 tests.append({'path': t})
2583 else:
2586 else:
2584 tests.append({'path': t})
2587 tests.append({'path': t})
2585 return tests
2588 return tests
2586
2589
2587 def _runtests(self, testdescs):
2590 def _runtests(self, testdescs):
2588 def _reloadtest(test, i):
2591 def _reloadtest(test, i):
2589 # convert a test back to its description dict
2592 # convert a test back to its description dict
2590 desc = {'path': test.path}
2593 desc = {'path': test.path}
2591 case = getattr(test, '_case', None)
2594 case = getattr(test, '_case', None)
2592 if case:
2595 if case:
2593 desc['case'] = case
2596 desc['case'] = case
2594 return self._gettest(desc, i)
2597 return self._gettest(desc, i)
2595
2598
2596 try:
2599 try:
2597 if self.options.restart:
2600 if self.options.restart:
2598 orig = list(testdescs)
2601 orig = list(testdescs)
2599 while testdescs:
2602 while testdescs:
2600 desc = testdescs[0]
2603 desc = testdescs[0]
2601 # desc['path'] is a relative path
2604 # desc['path'] is a relative path
2602 if 'case' in desc:
2605 if 'case' in desc:
2603 errpath = b'%s.%s.err' % (desc['path'], desc['case'])
2606 errpath = b'%s.%s.err' % (desc['path'], desc['case'])
2604 else:
2607 else:
2605 errpath = b'%s.err' % desc['path']
2608 errpath = b'%s.err' % desc['path']
2606 errpath = os.path.join(self._outputdir, errpath)
2609 errpath = os.path.join(self._outputdir, errpath)
2607 if os.path.exists(errpath):
2610 if os.path.exists(errpath):
2608 break
2611 break
2609 testdescs.pop(0)
2612 testdescs.pop(0)
2610 if not testdescs:
2613 if not testdescs:
2611 print("running all tests")
2614 print("running all tests")
2612 testdescs = orig
2615 testdescs = orig
2613
2616
2614 tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
2617 tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
2615
2618
2616 failed = False
2619 failed = False
2617 kws = self.options.keywords
2620 kws = self.options.keywords
2618 if kws is not None and PYTHON3:
2621 if kws is not None and PYTHON3:
2619 kws = kws.encode('utf-8')
2622 kws = kws.encode('utf-8')
2620
2623
2621 suite = TestSuite(self._testdir,
2624 suite = TestSuite(self._testdir,
2622 jobs=self.options.jobs,
2625 jobs=self.options.jobs,
2623 whitelist=self.options.whitelisted,
2626 whitelist=self.options.whitelisted,
2624 blacklist=self.options.blacklist,
2627 blacklist=self.options.blacklist,
2625 retest=self.options.retest,
2628 retest=self.options.retest,
2626 keywords=kws,
2629 keywords=kws,
2627 loop=self.options.loop,
2630 loop=self.options.loop,
2628 runs_per_test=self.options.runs_per_test,
2631 runs_per_test=self.options.runs_per_test,
2629 showchannels=self.options.showchannels,
2632 showchannels=self.options.showchannels,
2630 tests=tests, loadtest=_reloadtest)
2633 tests=tests, loadtest=_reloadtest)
2631 verbosity = 1
2634 verbosity = 1
2632 if self.options.verbose:
2635 if self.options.verbose:
2633 verbosity = 2
2636 verbosity = 2
2634 runner = TextTestRunner(self, verbosity=verbosity)
2637 runner = TextTestRunner(self, verbosity=verbosity)
2635
2638
2636 if self.options.list_tests:
2639 if self.options.list_tests:
2637 result = runner.listtests(suite)
2640 result = runner.listtests(suite)
2638 else:
2641 else:
2639 if self._installdir:
2642 if self._installdir:
2640 self._installhg()
2643 self._installhg()
2641 self._checkhglib("Testing")
2644 self._checkhglib("Testing")
2642 else:
2645 else:
2643 self._usecorrectpython()
2646 self._usecorrectpython()
2644 if self.options.chg:
2647 if self.options.chg:
2645 assert self._installdir
2648 assert self._installdir
2646 self._installchg()
2649 self._installchg()
2647
2650
2648 result = runner.run(suite)
2651 result = runner.run(suite)
2649
2652
2650 if result.failures:
2653 if result.failures:
2651 failed = True
2654 failed = True
2652
2655
2653 if self.options.anycoverage:
2656 if self.options.anycoverage:
2654 self._outputcoverage()
2657 self._outputcoverage()
2655 except KeyboardInterrupt:
2658 except KeyboardInterrupt:
2656 failed = True
2659 failed = True
2657 print("\ninterrupted!")
2660 print("\ninterrupted!")
2658
2661
2659 if failed:
2662 if failed:
2660 return 1
2663 return 1
2661
2664
2662 def _getport(self, count):
2665 def _getport(self, count):
2663 port = self._ports.get(count) # do we have a cached entry?
2666 port = self._ports.get(count) # do we have a cached entry?
2664 if port is None:
2667 if port is None:
2665 portneeded = 3
2668 portneeded = 3
2666 # above 100 tries we just give up and let test reports failure
2669 # above 100 tries we just give up and let test reports failure
2667 for tries in xrange(100):
2670 for tries in xrange(100):
2668 allfree = True
2671 allfree = True
2669 port = self.options.port + self._portoffset
2672 port = self.options.port + self._portoffset
2670 for idx in xrange(portneeded):
2673 for idx in xrange(portneeded):
2671 if not checkportisavailable(port + idx):
2674 if not checkportisavailable(port + idx):
2672 allfree = False
2675 allfree = False
2673 break
2676 break
2674 self._portoffset += portneeded
2677 self._portoffset += portneeded
2675 if allfree:
2678 if allfree:
2676 break
2679 break
2677 self._ports[count] = port
2680 self._ports[count] = port
2678 return port
2681 return port
2679
2682
2680 def _gettest(self, testdesc, count):
2683 def _gettest(self, testdesc, count):
2681 """Obtain a Test by looking at its filename.
2684 """Obtain a Test by looking at its filename.
2682
2685
2683 Returns a Test instance. The Test may not be runnable if it doesn't
2686 Returns a Test instance. The Test may not be runnable if it doesn't
2684 map to a known type.
2687 map to a known type.
2685 """
2688 """
2686 path = testdesc['path']
2689 path = testdesc['path']
2687 lctest = path.lower()
2690 lctest = path.lower()
2688 testcls = Test
2691 testcls = Test
2689
2692
2690 for ext, cls in self.TESTTYPES:
2693 for ext, cls in self.TESTTYPES:
2691 if lctest.endswith(ext):
2694 if lctest.endswith(ext):
2692 testcls = cls
2695 testcls = cls
2693 break
2696 break
2694
2697
2695 refpath = os.path.join(self._testdir, path)
2698 refpath = os.path.join(self._testdir, path)
2696 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2699 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2697
2700
2698 # extra keyword parameters. 'case' is used by .t tests
2701 # extra keyword parameters. 'case' is used by .t tests
2699 kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
2702 kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
2700
2703
2701 t = testcls(refpath, self._outputdir, tmpdir,
2704 t = testcls(refpath, self._outputdir, tmpdir,
2702 keeptmpdir=self.options.keep_tmpdir,
2705 keeptmpdir=self.options.keep_tmpdir,
2703 debug=self.options.debug,
2706 debug=self.options.debug,
2704 timeout=self.options.timeout,
2707 timeout=self.options.timeout,
2705 startport=self._getport(count),
2708 startport=self._getport(count),
2706 extraconfigopts=self.options.extra_config_opt,
2709 extraconfigopts=self.options.extra_config_opt,
2707 py3kwarnings=self.options.py3k_warnings,
2710 py3kwarnings=self.options.py3k_warnings,
2708 shell=self.options.shell,
2711 shell=self.options.shell,
2709 hgcommand=self._hgcommand,
2712 hgcommand=self._hgcommand,
2710 usechg=bool(self.options.with_chg or self.options.chg),
2713 usechg=bool(self.options.with_chg or self.options.chg),
2711 useipv6=useipv6, **kwds)
2714 useipv6=useipv6, **kwds)
2712 t.should_reload = True
2715 t.should_reload = True
2713 return t
2716 return t
2714
2717
2715 def _cleanup(self):
2718 def _cleanup(self):
2716 """Clean up state from this test invocation."""
2719 """Clean up state from this test invocation."""
2717 if self.options.keep_tmpdir:
2720 if self.options.keep_tmpdir:
2718 return
2721 return
2719
2722
2720 vlog("# Cleaning up HGTMP", self._hgtmp)
2723 vlog("# Cleaning up HGTMP", self._hgtmp)
2721 shutil.rmtree(self._hgtmp, True)
2724 shutil.rmtree(self._hgtmp, True)
2722 for f in self._createdfiles:
2725 for f in self._createdfiles:
2723 try:
2726 try:
2724 os.remove(f)
2727 os.remove(f)
2725 except OSError:
2728 except OSError:
2726 pass
2729 pass
2727
2730
2728 def _usecorrectpython(self):
2731 def _usecorrectpython(self):
2729 """Configure the environment to use the appropriate Python in tests."""
2732 """Configure the environment to use the appropriate Python in tests."""
2730 # Tests must use the same interpreter as us or bad things will happen.
2733 # Tests must use the same interpreter as us or bad things will happen.
2731 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
2734 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
2732 if getattr(os, 'symlink', None):
2735 if getattr(os, 'symlink', None):
2733 vlog("# Making python executable in test path a symlink to '%s'" %
2736 vlog("# Making python executable in test path a symlink to '%s'" %
2734 sys.executable)
2737 sys.executable)
2735 mypython = os.path.join(self._tmpbindir, pyexename)
2738 mypython = os.path.join(self._tmpbindir, pyexename)
2736 try:
2739 try:
2737 if os.readlink(mypython) == sys.executable:
2740 if os.readlink(mypython) == sys.executable:
2738 return
2741 return
2739 os.unlink(mypython)
2742 os.unlink(mypython)
2740 except OSError as err:
2743 except OSError as err:
2741 if err.errno != errno.ENOENT:
2744 if err.errno != errno.ENOENT:
2742 raise
2745 raise
2743 if self._findprogram(pyexename) != sys.executable:
2746 if self._findprogram(pyexename) != sys.executable:
2744 try:
2747 try:
2745 os.symlink(sys.executable, mypython)
2748 os.symlink(sys.executable, mypython)
2746 self._createdfiles.append(mypython)
2749 self._createdfiles.append(mypython)
2747 except OSError as err:
2750 except OSError as err:
2748 # child processes may race, which is harmless
2751 # child processes may race, which is harmless
2749 if err.errno != errno.EEXIST:
2752 if err.errno != errno.EEXIST:
2750 raise
2753 raise
2751 else:
2754 else:
2752 exedir, exename = os.path.split(sys.executable)
2755 exedir, exename = os.path.split(sys.executable)
2753 vlog("# Modifying search path to find %s as %s in '%s'" %
2756 vlog("# Modifying search path to find %s as %s in '%s'" %
2754 (exename, pyexename, exedir))
2757 (exename, pyexename, exedir))
2755 path = os.environ['PATH'].split(os.pathsep)
2758 path = os.environ['PATH'].split(os.pathsep)
2756 while exedir in path:
2759 while exedir in path:
2757 path.remove(exedir)
2760 path.remove(exedir)
2758 os.environ['PATH'] = os.pathsep.join([exedir] + path)
2761 os.environ['PATH'] = os.pathsep.join([exedir] + path)
2759 if not self._findprogram(pyexename):
2762 if not self._findprogram(pyexename):
2760 print("WARNING: Cannot find %s in search path" % pyexename)
2763 print("WARNING: Cannot find %s in search path" % pyexename)
2761
2764
2762 def _installhg(self):
2765 def _installhg(self):
2763 """Install hg into the test environment.
2766 """Install hg into the test environment.
2764
2767
2765 This will also configure hg with the appropriate testing settings.
2768 This will also configure hg with the appropriate testing settings.
2766 """
2769 """
2767 vlog("# Performing temporary installation of HG")
2770 vlog("# Performing temporary installation of HG")
2768 installerrs = os.path.join(self._hgtmp, b"install.err")
2771 installerrs = os.path.join(self._hgtmp, b"install.err")
2769 compiler = ''
2772 compiler = ''
2770 if self.options.compiler:
2773 if self.options.compiler:
2771 compiler = '--compiler ' + self.options.compiler
2774 compiler = '--compiler ' + self.options.compiler
2772 if self.options.pure:
2775 if self.options.pure:
2773 pure = b"--pure"
2776 pure = b"--pure"
2774 else:
2777 else:
2775 pure = b""
2778 pure = b""
2776
2779
2777 # Run installer in hg root
2780 # Run installer in hg root
2778 script = os.path.realpath(sys.argv[0])
2781 script = os.path.realpath(sys.argv[0])
2779 exe = sys.executable
2782 exe = sys.executable
2780 if PYTHON3:
2783 if PYTHON3:
2781 compiler = _bytespath(compiler)
2784 compiler = _bytespath(compiler)
2782 script = _bytespath(script)
2785 script = _bytespath(script)
2783 exe = _bytespath(exe)
2786 exe = _bytespath(exe)
2784 hgroot = os.path.dirname(os.path.dirname(script))
2787 hgroot = os.path.dirname(os.path.dirname(script))
2785 self._hgroot = hgroot
2788 self._hgroot = hgroot
2786 os.chdir(hgroot)
2789 os.chdir(hgroot)
2787 nohome = b'--home=""'
2790 nohome = b'--home=""'
2788 if os.name == 'nt':
2791 if os.name == 'nt':
2789 # The --home="" trick works only on OS where os.sep == '/'
2792 # The --home="" trick works only on OS where os.sep == '/'
2790 # because of a distutils convert_path() fast-path. Avoid it at
2793 # because of a distutils convert_path() fast-path. Avoid it at
2791 # least on Windows for now, deal with .pydistutils.cfg bugs
2794 # least on Windows for now, deal with .pydistutils.cfg bugs
2792 # when they happen.
2795 # when they happen.
2793 nohome = b''
2796 nohome = b''
2794 cmd = (b'%(exe)s setup.py %(pure)s clean --all'
2797 cmd = (b'%(exe)s setup.py %(pure)s clean --all'
2795 b' build %(compiler)s --build-base="%(base)s"'
2798 b' build %(compiler)s --build-base="%(base)s"'
2796 b' install --force --prefix="%(prefix)s"'
2799 b' install --force --prefix="%(prefix)s"'
2797 b' --install-lib="%(libdir)s"'
2800 b' --install-lib="%(libdir)s"'
2798 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
2801 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
2799 % {b'exe': exe, b'pure': pure,
2802 % {b'exe': exe, b'pure': pure,
2800 b'compiler': compiler,
2803 b'compiler': compiler,
2801 b'base': os.path.join(self._hgtmp, b"build"),
2804 b'base': os.path.join(self._hgtmp, b"build"),
2802 b'prefix': self._installdir, b'libdir': self._pythondir,
2805 b'prefix': self._installdir, b'libdir': self._pythondir,
2803 b'bindir': self._bindir,
2806 b'bindir': self._bindir,
2804 b'nohome': nohome, b'logfile': installerrs})
2807 b'nohome': nohome, b'logfile': installerrs})
2805
2808
2806 # setuptools requires install directories to exist.
2809 # setuptools requires install directories to exist.
2807 def makedirs(p):
2810 def makedirs(p):
2808 try:
2811 try:
2809 os.makedirs(p)
2812 os.makedirs(p)
2810 except OSError as e:
2813 except OSError as e:
2811 if e.errno != errno.EEXIST:
2814 if e.errno != errno.EEXIST:
2812 raise
2815 raise
2813 makedirs(self._pythondir)
2816 makedirs(self._pythondir)
2814 makedirs(self._bindir)
2817 makedirs(self._bindir)
2815
2818
2816 vlog("# Running", cmd)
2819 vlog("# Running", cmd)
2817 if os.system(cmd) == 0:
2820 if os.system(cmd) == 0:
2818 if not self.options.verbose:
2821 if not self.options.verbose:
2819 try:
2822 try:
2820 os.remove(installerrs)
2823 os.remove(installerrs)
2821 except OSError as e:
2824 except OSError as e:
2822 if e.errno != errno.ENOENT:
2825 if e.errno != errno.ENOENT:
2823 raise
2826 raise
2824 else:
2827 else:
2825 f = open(installerrs, 'rb')
2828 f = open(installerrs, 'rb')
2826 for line in f:
2829 for line in f:
2827 if PYTHON3:
2830 if PYTHON3:
2828 sys.stdout.buffer.write(line)
2831 sys.stdout.buffer.write(line)
2829 else:
2832 else:
2830 sys.stdout.write(line)
2833 sys.stdout.write(line)
2831 f.close()
2834 f.close()
2832 sys.exit(1)
2835 sys.exit(1)
2833 os.chdir(self._testdir)
2836 os.chdir(self._testdir)
2834
2837
2835 self._usecorrectpython()
2838 self._usecorrectpython()
2836
2839
2837 if self.options.py3k_warnings and not self.options.anycoverage:
2840 if self.options.py3k_warnings and not self.options.anycoverage:
2838 vlog("# Updating hg command to enable Py3k Warnings switch")
2841 vlog("# Updating hg command to enable Py3k Warnings switch")
2839 f = open(os.path.join(self._bindir, 'hg'), 'rb')
2842 f = open(os.path.join(self._bindir, 'hg'), 'rb')
2840 lines = [line.rstrip() for line in f]
2843 lines = [line.rstrip() for line in f]
2841 lines[0] += ' -3'
2844 lines[0] += ' -3'
2842 f.close()
2845 f.close()
2843 f = open(os.path.join(self._bindir, 'hg'), 'wb')
2846 f = open(os.path.join(self._bindir, 'hg'), 'wb')
2844 for line in lines:
2847 for line in lines:
2845 f.write(line + '\n')
2848 f.write(line + '\n')
2846 f.close()
2849 f.close()
2847
2850
2848 hgbat = os.path.join(self._bindir, b'hg.bat')
2851 hgbat = os.path.join(self._bindir, b'hg.bat')
2849 if os.path.isfile(hgbat):
2852 if os.path.isfile(hgbat):
2850 # hg.bat expects to be put in bin/scripts while run-tests.py
2853 # hg.bat expects to be put in bin/scripts while run-tests.py
2851 # installation layout put it in bin/ directly. Fix it
2854 # installation layout put it in bin/ directly. Fix it
2852 f = open(hgbat, 'rb')
2855 f = open(hgbat, 'rb')
2853 data = f.read()
2856 data = f.read()
2854 f.close()
2857 f.close()
2855 if b'"%~dp0..\python" "%~dp0hg" %*' in data:
2858 if b'"%~dp0..\python" "%~dp0hg" %*' in data:
2856 data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
2859 data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
2857 b'"%~dp0python" "%~dp0hg" %*')
2860 b'"%~dp0python" "%~dp0hg" %*')
2858 f = open(hgbat, 'wb')
2861 f = open(hgbat, 'wb')
2859 f.write(data)
2862 f.write(data)
2860 f.close()
2863 f.close()
2861 else:
2864 else:
2862 print('WARNING: cannot fix hg.bat reference to python.exe')
2865 print('WARNING: cannot fix hg.bat reference to python.exe')
2863
2866
2864 if self.options.anycoverage:
2867 if self.options.anycoverage:
2865 custom = os.path.join(self._testdir, 'sitecustomize.py')
2868 custom = os.path.join(self._testdir, 'sitecustomize.py')
2866 target = os.path.join(self._pythondir, 'sitecustomize.py')
2869 target = os.path.join(self._pythondir, 'sitecustomize.py')
2867 vlog('# Installing coverage trigger to %s' % target)
2870 vlog('# Installing coverage trigger to %s' % target)
2868 shutil.copyfile(custom, target)
2871 shutil.copyfile(custom, target)
2869 rc = os.path.join(self._testdir, '.coveragerc')
2872 rc = os.path.join(self._testdir, '.coveragerc')
2870 vlog('# Installing coverage rc to %s' % rc)
2873 vlog('# Installing coverage rc to %s' % rc)
2871 os.environ['COVERAGE_PROCESS_START'] = rc
2874 os.environ['COVERAGE_PROCESS_START'] = rc
2872 covdir = os.path.join(self._installdir, '..', 'coverage')
2875 covdir = os.path.join(self._installdir, '..', 'coverage')
2873 try:
2876 try:
2874 os.mkdir(covdir)
2877 os.mkdir(covdir)
2875 except OSError as e:
2878 except OSError as e:
2876 if e.errno != errno.EEXIST:
2879 if e.errno != errno.EEXIST:
2877 raise
2880 raise
2878
2881
2879 os.environ['COVERAGE_DIR'] = covdir
2882 os.environ['COVERAGE_DIR'] = covdir
2880
2883
2881 def _checkhglib(self, verb):
2884 def _checkhglib(self, verb):
2882 """Ensure that the 'mercurial' package imported by python is
2885 """Ensure that the 'mercurial' package imported by python is
2883 the one we expect it to be. If not, print a warning to stderr."""
2886 the one we expect it to be. If not, print a warning to stderr."""
2884 if ((self._bindir == self._pythondir) and
2887 if ((self._bindir == self._pythondir) and
2885 (self._bindir != self._tmpbindir)):
2888 (self._bindir != self._tmpbindir)):
2886 # The pythondir has been inferred from --with-hg flag.
2889 # The pythondir has been inferred from --with-hg flag.
2887 # We cannot expect anything sensible here.
2890 # We cannot expect anything sensible here.
2888 return
2891 return
2889 expecthg = os.path.join(self._pythondir, b'mercurial')
2892 expecthg = os.path.join(self._pythondir, b'mercurial')
2890 actualhg = self._gethgpath()
2893 actualhg = self._gethgpath()
2891 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
2894 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
2892 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
2895 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
2893 ' (expected %s)\n'
2896 ' (expected %s)\n'
2894 % (verb, actualhg, expecthg))
2897 % (verb, actualhg, expecthg))
2895 def _gethgpath(self):
2898 def _gethgpath(self):
2896 """Return the path to the mercurial package that is actually found by
2899 """Return the path to the mercurial package that is actually found by
2897 the current Python interpreter."""
2900 the current Python interpreter."""
2898 if self._hgpath is not None:
2901 if self._hgpath is not None:
2899 return self._hgpath
2902 return self._hgpath
2900
2903
2901 cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
2904 cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
2902 cmd = cmd % PYTHON
2905 cmd = cmd % PYTHON
2903 if PYTHON3:
2906 if PYTHON3:
2904 cmd = _strpath(cmd)
2907 cmd = _strpath(cmd)
2905 pipe = os.popen(cmd)
2908 pipe = os.popen(cmd)
2906 try:
2909 try:
2907 self._hgpath = _bytespath(pipe.read().strip())
2910 self._hgpath = _bytespath(pipe.read().strip())
2908 finally:
2911 finally:
2909 pipe.close()
2912 pipe.close()
2910
2913
2911 return self._hgpath
2914 return self._hgpath
2912
2915
2913 def _installchg(self):
2916 def _installchg(self):
2914 """Install chg into the test environment"""
2917 """Install chg into the test environment"""
2915 vlog('# Performing temporary installation of CHG')
2918 vlog('# Performing temporary installation of CHG')
2916 assert os.path.dirname(self._bindir) == self._installdir
2919 assert os.path.dirname(self._bindir) == self._installdir
2917 assert self._hgroot, 'must be called after _installhg()'
2920 assert self._hgroot, 'must be called after _installhg()'
2918 cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
2921 cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
2919 % {b'make': 'make', # TODO: switch by option or environment?
2922 % {b'make': 'make', # TODO: switch by option or environment?
2920 b'prefix': self._installdir})
2923 b'prefix': self._installdir})
2921 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
2924 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
2922 vlog("# Running", cmd)
2925 vlog("# Running", cmd)
2923 proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
2926 proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
2924 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
2927 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
2925 stderr=subprocess.STDOUT)
2928 stderr=subprocess.STDOUT)
2926 out, _err = proc.communicate()
2929 out, _err = proc.communicate()
2927 if proc.returncode != 0:
2930 if proc.returncode != 0:
2928 if PYTHON3:
2931 if PYTHON3:
2929 sys.stdout.buffer.write(out)
2932 sys.stdout.buffer.write(out)
2930 else:
2933 else:
2931 sys.stdout.write(out)
2934 sys.stdout.write(out)
2932 sys.exit(1)
2935 sys.exit(1)
2933
2936
2934 def _outputcoverage(self):
2937 def _outputcoverage(self):
2935 """Produce code coverage output."""
2938 """Produce code coverage output."""
2936 import coverage
2939 import coverage
2937 coverage = coverage.coverage
2940 coverage = coverage.coverage
2938
2941
2939 vlog('# Producing coverage report')
2942 vlog('# Producing coverage report')
2940 # chdir is the easiest way to get short, relative paths in the
2943 # chdir is the easiest way to get short, relative paths in the
2941 # output.
2944 # output.
2942 os.chdir(self._hgroot)
2945 os.chdir(self._hgroot)
2943 covdir = os.path.join(self._installdir, '..', 'coverage')
2946 covdir = os.path.join(self._installdir, '..', 'coverage')
2944 cov = coverage(data_file=os.path.join(covdir, 'cov'))
2947 cov = coverage(data_file=os.path.join(covdir, 'cov'))
2945
2948
2946 # Map install directory paths back to source directory.
2949 # Map install directory paths back to source directory.
2947 cov.config.paths['srcdir'] = ['.', self._pythondir]
2950 cov.config.paths['srcdir'] = ['.', self._pythondir]
2948
2951
2949 cov.combine()
2952 cov.combine()
2950
2953
2951 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
2954 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
2952 cov.report(ignore_errors=True, omit=omit)
2955 cov.report(ignore_errors=True, omit=omit)
2953
2956
2954 if self.options.htmlcov:
2957 if self.options.htmlcov:
2955 htmldir = os.path.join(self._outputdir, 'htmlcov')
2958 htmldir = os.path.join(self._outputdir, 'htmlcov')
2956 cov.html_report(directory=htmldir, omit=omit)
2959 cov.html_report(directory=htmldir, omit=omit)
2957 if self.options.annotate:
2960 if self.options.annotate:
2958 adir = os.path.join(self._outputdir, 'annotated')
2961 adir = os.path.join(self._outputdir, 'annotated')
2959 if not os.path.isdir(adir):
2962 if not os.path.isdir(adir):
2960 os.mkdir(adir)
2963 os.mkdir(adir)
2961 cov.annotate(directory=adir, omit=omit)
2964 cov.annotate(directory=adir, omit=omit)
2962
2965
2963 def _findprogram(self, program):
2966 def _findprogram(self, program):
2964 """Search PATH for a executable program"""
2967 """Search PATH for a executable program"""
2965 dpb = _bytespath(os.defpath)
2968 dpb = _bytespath(os.defpath)
2966 sepb = _bytespath(os.pathsep)
2969 sepb = _bytespath(os.pathsep)
2967 for p in osenvironb.get(b'PATH', dpb).split(sepb):
2970 for p in osenvironb.get(b'PATH', dpb).split(sepb):
2968 name = os.path.join(p, program)
2971 name = os.path.join(p, program)
2969 if os.name == 'nt' or os.access(name, os.X_OK):
2972 if os.name == 'nt' or os.access(name, os.X_OK):
2970 return name
2973 return name
2971 return None
2974 return None
2972
2975
2973 def _checktools(self):
2976 def _checktools(self):
2974 """Ensure tools required to run tests are present."""
2977 """Ensure tools required to run tests are present."""
2975 for p in self.REQUIREDTOOLS:
2978 for p in self.REQUIREDTOOLS:
2976 if os.name == 'nt' and not p.endswith('.exe'):
2979 if os.name == 'nt' and not p.endswith('.exe'):
2977 p += '.exe'
2980 p += '.exe'
2978 found = self._findprogram(p)
2981 found = self._findprogram(p)
2979 if found:
2982 if found:
2980 vlog("# Found prerequisite", p, "at", found)
2983 vlog("# Found prerequisite", p, "at", found)
2981 else:
2984 else:
2982 print("WARNING: Did not find prerequisite tool: %s " %
2985 print("WARNING: Did not find prerequisite tool: %s " %
2983 p.decode("utf-8"))
2986 p.decode("utf-8"))
2984
2987
2985 def aggregateexceptions(path):
2988 def aggregateexceptions(path):
2986 exceptions = collections.Counter()
2989 exceptions = collections.Counter()
2987
2990
2988 for f in os.listdir(path):
2991 for f in os.listdir(path):
2989 with open(os.path.join(path, f), 'rb') as fh:
2992 with open(os.path.join(path, f), 'rb') as fh:
2990 data = fh.read().split(b'\0')
2993 data = fh.read().split(b'\0')
2991 if len(data) != 4:
2994 if len(data) != 4:
2992 continue
2995 continue
2993
2996
2994 exc, mainframe, hgframe, hgline = data
2997 exc, mainframe, hgframe, hgline = data
2995 exc = exc.decode('utf-8')
2998 exc = exc.decode('utf-8')
2996 mainframe = mainframe.decode('utf-8')
2999 mainframe = mainframe.decode('utf-8')
2997 hgframe = hgframe.decode('utf-8')
3000 hgframe = hgframe.decode('utf-8')
2998 hgline = hgline.decode('utf-8')
3001 hgline = hgline.decode('utf-8')
2999 exceptions[(hgframe, hgline, exc)] += 1
3002 exceptions[(hgframe, hgline, exc)] += 1
3000
3003
3001 return exceptions
3004 return exceptions
3002
3005
3003 if __name__ == '__main__':
3006 if __name__ == '__main__':
3004 runner = TestRunner()
3007 runner = TestRunner()
3005
3008
3006 try:
3009 try:
3007 import msvcrt
3010 import msvcrt
3008 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
3011 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
3009 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
3012 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
3010 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
3013 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
3011 except ImportError:
3014 except ImportError:
3012 pass
3015 pass
3013
3016
3014 sys.exit(runner.run(sys.argv[1:]))
3017 sys.exit(runner.run(sys.argv[1:]))
@@ -1,99 +1,100 b''
1 Create a repository:
1 Create a repository:
2
2
3 $ hg config
3 $ hg config
4 devel.all-warnings=true
4 devel.all-warnings=true
5 devel.default-date=0 0
5 devel.default-date=0 0
6 extensions.fsmonitor= (fsmonitor !)
6 extensions.fsmonitor= (fsmonitor !)
7 largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
7 largefiles.usercache=$TESTTMP/.cache/largefiles (glob)
8 lfs.usercache=$TESTTMP/.cache/lfs (glob)
8 ui.slash=True
9 ui.slash=True
9 ui.interactive=False
10 ui.interactive=False
10 ui.mergemarkers=detailed
11 ui.mergemarkers=detailed
11 ui.promptecho=True
12 ui.promptecho=True
12 web.address=localhost
13 web.address=localhost
13 web\.ipv6=(?:True|False) (re)
14 web\.ipv6=(?:True|False) (re)
14 $ hg init t
15 $ hg init t
15 $ cd t
16 $ cd t
16
17
17 Prepare a changeset:
18 Prepare a changeset:
18
19
19 $ echo a > a
20 $ echo a > a
20 $ hg add a
21 $ hg add a
21
22
22 $ hg status
23 $ hg status
23 A a
24 A a
24
25
25 Writes to stdio succeed and fail appropriately
26 Writes to stdio succeed and fail appropriately
26
27
27 #if devfull
28 #if devfull
28 $ hg status 2>/dev/full
29 $ hg status 2>/dev/full
29 A a
30 A a
30
31
31 $ hg status >/dev/full
32 $ hg status >/dev/full
32 abort: No space left on device
33 abort: No space left on device
33 [255]
34 [255]
34 #endif
35 #endif
35
36
36 #if devfull no-chg
37 #if devfull no-chg
37 $ hg status >/dev/full 2>&1
38 $ hg status >/dev/full 2>&1
38 [1]
39 [1]
39
40
40 $ hg status ENOENT 2>/dev/full
41 $ hg status ENOENT 2>/dev/full
41 [1]
42 [1]
42 #endif
43 #endif
43
44
44 #if devfull chg
45 #if devfull chg
45 $ hg status >/dev/full 2>&1
46 $ hg status >/dev/full 2>&1
46 [255]
47 [255]
47
48
48 $ hg status ENOENT 2>/dev/full
49 $ hg status ENOENT 2>/dev/full
49 [255]
50 [255]
50 #endif
51 #endif
51
52
52 $ hg commit -m test
53 $ hg commit -m test
53
54
54 This command is ancient:
55 This command is ancient:
55
56
56 $ hg history
57 $ hg history
57 changeset: 0:acb14030fe0a
58 changeset: 0:acb14030fe0a
58 tag: tip
59 tag: tip
59 user: test
60 user: test
60 date: Thu Jan 01 00:00:00 1970 +0000
61 date: Thu Jan 01 00:00:00 1970 +0000
61 summary: test
62 summary: test
62
63
63
64
64 Verify that updating to revision 0 via commands.update() works properly
65 Verify that updating to revision 0 via commands.update() works properly
65
66
66 $ cat <<EOF > update_to_rev0.py
67 $ cat <<EOF > update_to_rev0.py
67 > from mercurial import ui, hg, commands
68 > from mercurial import ui, hg, commands
68 > myui = ui.ui.load()
69 > myui = ui.ui.load()
69 > repo = hg.repository(myui, path='.')
70 > repo = hg.repository(myui, path='.')
70 > commands.update(myui, repo, rev=0)
71 > commands.update(myui, repo, rev=0)
71 > EOF
72 > EOF
72 $ hg up null
73 $ hg up null
73 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
74 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
74 $ $PYTHON ./update_to_rev0.py
75 $ $PYTHON ./update_to_rev0.py
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 $ hg identify -n
77 $ hg identify -n
77 0
78 0
78
79
79
80
80 Poke around at hashes:
81 Poke around at hashes:
81
82
82 $ hg manifest --debug
83 $ hg manifest --debug
83 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
84 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
84
85
85 $ hg cat a
86 $ hg cat a
86 a
87 a
87
88
88 Verify should succeed:
89 Verify should succeed:
89
90
90 $ hg verify
91 $ hg verify
91 checking changesets
92 checking changesets
92 checking manifests
93 checking manifests
93 crosschecking files in changesets and manifests
94 crosschecking files in changesets and manifests
94 checking files
95 checking files
95 1 files, 1 changesets, 1 total revisions
96 1 files, 1 changesets, 1 total revisions
96
97
97 At the end...
98 At the end...
98
99
99 $ cd ..
100 $ cd ..
@@ -1,1023 +1,1024 b''
1 #if windows
1 #if windows
2 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
2 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
3 #else
3 #else
4 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
4 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
5 #endif
5 #endif
6 $ export PYTHONPATH
6 $ export PYTHONPATH
7
7
8 typical client does not want echo-back messages, so test without it:
8 typical client does not want echo-back messages, so test without it:
9
9
10 $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
10 $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
11 $ mv $HGRCPATH.new $HGRCPATH
11 $ mv $HGRCPATH.new $HGRCPATH
12
12
13 $ hg init repo
13 $ hg init repo
14 $ cd repo
14 $ cd repo
15
15
16 >>> from __future__ import absolute_import, print_function
16 >>> from __future__ import absolute_import, print_function
17 >>> import os
17 >>> import os
18 >>> import sys
18 >>> import sys
19 >>> from hgclient import check, readchannel, runcommand
19 >>> from hgclient import check, readchannel, runcommand
20 >>> @check
20 >>> @check
21 ... def hellomessage(server):
21 ... def hellomessage(server):
22 ... ch, data = readchannel(server)
22 ... ch, data = readchannel(server)
23 ... print('%c, %r' % (ch, data))
23 ... print('%c, %r' % (ch, data))
24 ... # run an arbitrary command to make sure the next thing the server
24 ... # run an arbitrary command to make sure the next thing the server
25 ... # sends isn't part of the hello message
25 ... # sends isn't part of the hello message
26 ... runcommand(server, ['id'])
26 ... runcommand(server, ['id'])
27 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
27 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
28 *** runcommand id
28 *** runcommand id
29 000000000000 tip
29 000000000000 tip
30
30
31 >>> from hgclient import check
31 >>> from hgclient import check
32 >>> @check
32 >>> @check
33 ... def unknowncommand(server):
33 ... def unknowncommand(server):
34 ... server.stdin.write('unknowncommand\n')
34 ... server.stdin.write('unknowncommand\n')
35 abort: unknown command unknowncommand
35 abort: unknown command unknowncommand
36
36
37 >>> from hgclient import check, readchannel, runcommand
37 >>> from hgclient import check, readchannel, runcommand
38 >>> @check
38 >>> @check
39 ... def checkruncommand(server):
39 ... def checkruncommand(server):
40 ... # hello block
40 ... # hello block
41 ... readchannel(server)
41 ... readchannel(server)
42 ...
42 ...
43 ... # no args
43 ... # no args
44 ... runcommand(server, [])
44 ... runcommand(server, [])
45 ...
45 ...
46 ... # global options
46 ... # global options
47 ... runcommand(server, ['id', '--quiet'])
47 ... runcommand(server, ['id', '--quiet'])
48 ...
48 ...
49 ... # make sure global options don't stick through requests
49 ... # make sure global options don't stick through requests
50 ... runcommand(server, ['id'])
50 ... runcommand(server, ['id'])
51 ...
51 ...
52 ... # --config
52 ... # --config
53 ... runcommand(server, ['id', '--config', 'ui.quiet=True'])
53 ... runcommand(server, ['id', '--config', 'ui.quiet=True'])
54 ...
54 ...
55 ... # make sure --config doesn't stick
55 ... # make sure --config doesn't stick
56 ... runcommand(server, ['id'])
56 ... runcommand(server, ['id'])
57 ...
57 ...
58 ... # negative return code should be masked
58 ... # negative return code should be masked
59 ... runcommand(server, ['id', '-runknown'])
59 ... runcommand(server, ['id', '-runknown'])
60 *** runcommand
60 *** runcommand
61 Mercurial Distributed SCM
61 Mercurial Distributed SCM
62
62
63 basic commands:
63 basic commands:
64
64
65 add add the specified files on the next commit
65 add add the specified files on the next commit
66 annotate show changeset information by line for each file
66 annotate show changeset information by line for each file
67 clone make a copy of an existing repository
67 clone make a copy of an existing repository
68 commit commit the specified files or all outstanding changes
68 commit commit the specified files or all outstanding changes
69 diff diff repository (or selected files)
69 diff diff repository (or selected files)
70 export dump the header and diffs for one or more changesets
70 export dump the header and diffs for one or more changesets
71 forget forget the specified files on the next commit
71 forget forget the specified files on the next commit
72 init create a new repository in the given directory
72 init create a new repository in the given directory
73 log show revision history of entire repository or files
73 log show revision history of entire repository or files
74 merge merge another revision into working directory
74 merge merge another revision into working directory
75 pull pull changes from the specified source
75 pull pull changes from the specified source
76 push push changes to the specified destination
76 push push changes to the specified destination
77 remove remove the specified files on the next commit
77 remove remove the specified files on the next commit
78 serve start stand-alone webserver
78 serve start stand-alone webserver
79 status show changed files in the working directory
79 status show changed files in the working directory
80 summary summarize working directory state
80 summary summarize working directory state
81 update update working directory (or switch revisions)
81 update update working directory (or switch revisions)
82
82
83 (use 'hg help' for the full list of commands or 'hg -v' for details)
83 (use 'hg help' for the full list of commands or 'hg -v' for details)
84 *** runcommand id --quiet
84 *** runcommand id --quiet
85 000000000000
85 000000000000
86 *** runcommand id
86 *** runcommand id
87 000000000000 tip
87 000000000000 tip
88 *** runcommand id --config ui.quiet=True
88 *** runcommand id --config ui.quiet=True
89 000000000000
89 000000000000
90 *** runcommand id
90 *** runcommand id
91 000000000000 tip
91 000000000000 tip
92 *** runcommand id -runknown
92 *** runcommand id -runknown
93 abort: unknown revision 'unknown'!
93 abort: unknown revision 'unknown'!
94 [255]
94 [255]
95
95
96 >>> from hgclient import check, readchannel
96 >>> from hgclient import check, readchannel
97 >>> @check
97 >>> @check
98 ... def inputeof(server):
98 ... def inputeof(server):
99 ... readchannel(server)
99 ... readchannel(server)
100 ... server.stdin.write('runcommand\n')
100 ... server.stdin.write('runcommand\n')
101 ... # close stdin while server is waiting for input
101 ... # close stdin while server is waiting for input
102 ... server.stdin.close()
102 ... server.stdin.close()
103 ...
103 ...
104 ... # server exits with 1 if the pipe closed while reading the command
104 ... # server exits with 1 if the pipe closed while reading the command
105 ... print('server exit code =', server.wait())
105 ... print('server exit code =', server.wait())
106 server exit code = 1
106 server exit code = 1
107
107
108 >>> from hgclient import check, readchannel, runcommand, stringio
108 >>> from hgclient import check, readchannel, runcommand, stringio
109 >>> @check
109 >>> @check
110 ... def serverinput(server):
110 ... def serverinput(server):
111 ... readchannel(server)
111 ... readchannel(server)
112 ...
112 ...
113 ... patch = """
113 ... patch = """
114 ... # HG changeset patch
114 ... # HG changeset patch
115 ... # User test
115 ... # User test
116 ... # Date 0 0
116 ... # Date 0 0
117 ... # Node ID c103a3dec114d882c98382d684d8af798d09d857
117 ... # Node ID c103a3dec114d882c98382d684d8af798d09d857
118 ... # Parent 0000000000000000000000000000000000000000
118 ... # Parent 0000000000000000000000000000000000000000
119 ... 1
119 ... 1
120 ...
120 ...
121 ... diff -r 000000000000 -r c103a3dec114 a
121 ... diff -r 000000000000 -r c103a3dec114 a
122 ... --- /dev/null Thu Jan 01 00:00:00 1970 +0000
122 ... --- /dev/null Thu Jan 01 00:00:00 1970 +0000
123 ... +++ b/a Thu Jan 01 00:00:00 1970 +0000
123 ... +++ b/a Thu Jan 01 00:00:00 1970 +0000
124 ... @@ -0,0 +1,1 @@
124 ... @@ -0,0 +1,1 @@
125 ... +1
125 ... +1
126 ... """
126 ... """
127 ...
127 ...
128 ... runcommand(server, ['import', '-'], input=stringio(patch))
128 ... runcommand(server, ['import', '-'], input=stringio(patch))
129 ... runcommand(server, ['log'])
129 ... runcommand(server, ['log'])
130 *** runcommand import -
130 *** runcommand import -
131 applying patch from stdin
131 applying patch from stdin
132 *** runcommand log
132 *** runcommand log
133 changeset: 0:eff892de26ec
133 changeset: 0:eff892de26ec
134 tag: tip
134 tag: tip
135 user: test
135 user: test
136 date: Thu Jan 01 00:00:00 1970 +0000
136 date: Thu Jan 01 00:00:00 1970 +0000
137 summary: 1
137 summary: 1
138
138
139
139
140 check strict parsing of early options:
140 check strict parsing of early options:
141
141
142 >>> import os
142 >>> import os
143 >>> from hgclient import check, readchannel, runcommand
143 >>> from hgclient import check, readchannel, runcommand
144 >>> os.environ['HGPLAIN'] = '+strictflags'
144 >>> os.environ['HGPLAIN'] = '+strictflags'
145 >>> @check
145 >>> @check
146 ... def cwd(server):
146 ... def cwd(server):
147 ... readchannel(server)
147 ... readchannel(server)
148 ... runcommand(server, ['log', '-b', '--config=alias.log=!echo pwned',
148 ... runcommand(server, ['log', '-b', '--config=alias.log=!echo pwned',
149 ... 'default'])
149 ... 'default'])
150 *** runcommand log -b --config=alias.log=!echo pwned default
150 *** runcommand log -b --config=alias.log=!echo pwned default
151 abort: unknown revision '--config=alias.log=!echo pwned'!
151 abort: unknown revision '--config=alias.log=!echo pwned'!
152 [255]
152 [255]
153
153
154 check that "histedit --commands=-" can read rules from the input channel:
154 check that "histedit --commands=-" can read rules from the input channel:
155
155
156 >>> import cStringIO
156 >>> import cStringIO
157 >>> from hgclient import check, readchannel, runcommand
157 >>> from hgclient import check, readchannel, runcommand
158 >>> @check
158 >>> @check
159 ... def serverinput(server):
159 ... def serverinput(server):
160 ... readchannel(server)
160 ... readchannel(server)
161 ... rules = 'pick eff892de26ec\n'
161 ... rules = 'pick eff892de26ec\n'
162 ... runcommand(server, ['histedit', '0', '--commands=-',
162 ... runcommand(server, ['histedit', '0', '--commands=-',
163 ... '--config', 'extensions.histedit='],
163 ... '--config', 'extensions.histedit='],
164 ... input=cStringIO.StringIO(rules))
164 ... input=cStringIO.StringIO(rules))
165 *** runcommand histedit 0 --commands=- --config extensions.histedit=
165 *** runcommand histedit 0 --commands=- --config extensions.histedit=
166
166
167 check that --cwd doesn't persist between requests:
167 check that --cwd doesn't persist between requests:
168
168
169 $ mkdir foo
169 $ mkdir foo
170 $ touch foo/bar
170 $ touch foo/bar
171 >>> from hgclient import check, readchannel, runcommand
171 >>> from hgclient import check, readchannel, runcommand
172 >>> @check
172 >>> @check
173 ... def cwd(server):
173 ... def cwd(server):
174 ... readchannel(server)
174 ... readchannel(server)
175 ... runcommand(server, ['--cwd', 'foo', 'st', 'bar'])
175 ... runcommand(server, ['--cwd', 'foo', 'st', 'bar'])
176 ... runcommand(server, ['st', 'foo/bar'])
176 ... runcommand(server, ['st', 'foo/bar'])
177 *** runcommand --cwd foo st bar
177 *** runcommand --cwd foo st bar
178 ? bar
178 ? bar
179 *** runcommand st foo/bar
179 *** runcommand st foo/bar
180 ? foo/bar
180 ? foo/bar
181
181
182 $ rm foo/bar
182 $ rm foo/bar
183
183
184
184
185 check that local configs for the cached repo aren't inherited when -R is used:
185 check that local configs for the cached repo aren't inherited when -R is used:
186
186
187 $ cat <<EOF >> .hg/hgrc
187 $ cat <<EOF >> .hg/hgrc
188 > [ui]
188 > [ui]
189 > foo = bar
189 > foo = bar
190 > EOF
190 > EOF
191
191
192 >>> from hgclient import check, readchannel, runcommand, sep
192 >>> from hgclient import check, readchannel, runcommand, sep
193 >>> @check
193 >>> @check
194 ... def localhgrc(server):
194 ... def localhgrc(server):
195 ... readchannel(server)
195 ... readchannel(server)
196 ...
196 ...
197 ... # the cached repo local hgrc contains ui.foo=bar, so showconfig should
197 ... # the cached repo local hgrc contains ui.foo=bar, so showconfig should
198 ... # show it
198 ... # show it
199 ... runcommand(server, ['showconfig'], outfilter=sep)
199 ... runcommand(server, ['showconfig'], outfilter=sep)
200 ...
200 ...
201 ... # but not for this repo
201 ... # but not for this repo
202 ... runcommand(server, ['init', 'foo'])
202 ... runcommand(server, ['init', 'foo'])
203 ... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults'])
203 ... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults'])
204 *** runcommand showconfig
204 *** runcommand showconfig
205 bundle.mainreporoot=$TESTTMP/repo
205 bundle.mainreporoot=$TESTTMP/repo
206 devel.all-warnings=true
206 devel.all-warnings=true
207 devel.default-date=0 0
207 devel.default-date=0 0
208 extensions.fsmonitor= (fsmonitor !)
208 extensions.fsmonitor= (fsmonitor !)
209 largefiles.usercache=$TESTTMP/.cache/largefiles
209 largefiles.usercache=$TESTTMP/.cache/largefiles
210 lfs.usercache=$TESTTMP/.cache/lfs
210 ui.slash=True
211 ui.slash=True
211 ui.interactive=False
212 ui.interactive=False
212 ui.mergemarkers=detailed
213 ui.mergemarkers=detailed
213 ui.usehttp2=true (?)
214 ui.usehttp2=true (?)
214 ui.foo=bar
215 ui.foo=bar
215 ui.nontty=true
216 ui.nontty=true
216 web.address=localhost
217 web.address=localhost
217 web\.ipv6=(?:True|False) (re)
218 web\.ipv6=(?:True|False) (re)
218 *** runcommand init foo
219 *** runcommand init foo
219 *** runcommand -R foo showconfig ui defaults
220 *** runcommand -R foo showconfig ui defaults
220 ui.slash=True
221 ui.slash=True
221 ui.interactive=False
222 ui.interactive=False
222 ui.mergemarkers=detailed
223 ui.mergemarkers=detailed
223 ui.usehttp2=true (?)
224 ui.usehttp2=true (?)
224 ui.nontty=true
225 ui.nontty=true
225
226
226 $ rm -R foo
227 $ rm -R foo
227
228
228 #if windows
229 #if windows
229 $ PYTHONPATH="$TESTTMP/repo;$PYTHONPATH"
230 $ PYTHONPATH="$TESTTMP/repo;$PYTHONPATH"
230 #else
231 #else
231 $ PYTHONPATH="$TESTTMP/repo:$PYTHONPATH"
232 $ PYTHONPATH="$TESTTMP/repo:$PYTHONPATH"
232 #endif
233 #endif
233
234
234 $ cat <<EOF > hook.py
235 $ cat <<EOF > hook.py
235 > from __future__ import print_function
236 > from __future__ import print_function
236 > import sys
237 > import sys
237 > def hook(**args):
238 > def hook(**args):
238 > print('hook talking')
239 > print('hook talking')
239 > print('now try to read something: %r' % sys.stdin.read())
240 > print('now try to read something: %r' % sys.stdin.read())
240 > EOF
241 > EOF
241
242
242 >>> from hgclient import check, readchannel, runcommand, stringio
243 >>> from hgclient import check, readchannel, runcommand, stringio
243 >>> @check
244 >>> @check
244 ... def hookoutput(server):
245 ... def hookoutput(server):
245 ... readchannel(server)
246 ... readchannel(server)
246 ... runcommand(server, ['--config',
247 ... runcommand(server, ['--config',
247 ... 'hooks.pre-identify=python:hook.hook',
248 ... 'hooks.pre-identify=python:hook.hook',
248 ... 'id'],
249 ... 'id'],
249 ... input=stringio('some input'))
250 ... input=stringio('some input'))
250 *** runcommand --config hooks.pre-identify=python:hook.hook id
251 *** runcommand --config hooks.pre-identify=python:hook.hook id
251 eff892de26ec tip
252 eff892de26ec tip
252
253
253 Clean hook cached version
254 Clean hook cached version
254 $ rm hook.py*
255 $ rm hook.py*
255 $ rm -Rf __pycache__
256 $ rm -Rf __pycache__
256
257
257 $ echo a >> a
258 $ echo a >> a
258 >>> import os
259 >>> import os
259 >>> from hgclient import check, readchannel, runcommand
260 >>> from hgclient import check, readchannel, runcommand
260 >>> @check
261 >>> @check
261 ... def outsidechanges(server):
262 ... def outsidechanges(server):
262 ... readchannel(server)
263 ... readchannel(server)
263 ... runcommand(server, ['status'])
264 ... runcommand(server, ['status'])
264 ... os.system('hg ci -Am2')
265 ... os.system('hg ci -Am2')
265 ... runcommand(server, ['tip'])
266 ... runcommand(server, ['tip'])
266 ... runcommand(server, ['status'])
267 ... runcommand(server, ['status'])
267 *** runcommand status
268 *** runcommand status
268 M a
269 M a
269 *** runcommand tip
270 *** runcommand tip
270 changeset: 1:d3a0a68be6de
271 changeset: 1:d3a0a68be6de
271 tag: tip
272 tag: tip
272 user: test
273 user: test
273 date: Thu Jan 01 00:00:00 1970 +0000
274 date: Thu Jan 01 00:00:00 1970 +0000
274 summary: 2
275 summary: 2
275
276
276 *** runcommand status
277 *** runcommand status
277
278
278 >>> import os
279 >>> import os
279 >>> from hgclient import check, readchannel, runcommand
280 >>> from hgclient import check, readchannel, runcommand
280 >>> @check
281 >>> @check
281 ... def bookmarks(server):
282 ... def bookmarks(server):
282 ... readchannel(server)
283 ... readchannel(server)
283 ... runcommand(server, ['bookmarks'])
284 ... runcommand(server, ['bookmarks'])
284 ...
285 ...
285 ... # changes .hg/bookmarks
286 ... # changes .hg/bookmarks
286 ... os.system('hg bookmark -i bm1')
287 ... os.system('hg bookmark -i bm1')
287 ... os.system('hg bookmark -i bm2')
288 ... os.system('hg bookmark -i bm2')
288 ... runcommand(server, ['bookmarks'])
289 ... runcommand(server, ['bookmarks'])
289 ...
290 ...
290 ... # changes .hg/bookmarks.current
291 ... # changes .hg/bookmarks.current
291 ... os.system('hg upd bm1 -q')
292 ... os.system('hg upd bm1 -q')
292 ... runcommand(server, ['bookmarks'])
293 ... runcommand(server, ['bookmarks'])
293 ...
294 ...
294 ... runcommand(server, ['bookmarks', 'bm3'])
295 ... runcommand(server, ['bookmarks', 'bm3'])
295 ... f = open('a', 'ab')
296 ... f = open('a', 'ab')
296 ... f.write('a\n')
297 ... f.write('a\n')
297 ... f.close()
298 ... f.close()
298 ... runcommand(server, ['commit', '-Amm'])
299 ... runcommand(server, ['commit', '-Amm'])
299 ... runcommand(server, ['bookmarks'])
300 ... runcommand(server, ['bookmarks'])
300 ... print('')
301 ... print('')
301 *** runcommand bookmarks
302 *** runcommand bookmarks
302 no bookmarks set
303 no bookmarks set
303 *** runcommand bookmarks
304 *** runcommand bookmarks
304 bm1 1:d3a0a68be6de
305 bm1 1:d3a0a68be6de
305 bm2 1:d3a0a68be6de
306 bm2 1:d3a0a68be6de
306 *** runcommand bookmarks
307 *** runcommand bookmarks
307 * bm1 1:d3a0a68be6de
308 * bm1 1:d3a0a68be6de
308 bm2 1:d3a0a68be6de
309 bm2 1:d3a0a68be6de
309 *** runcommand bookmarks bm3
310 *** runcommand bookmarks bm3
310 *** runcommand commit -Amm
311 *** runcommand commit -Amm
311 *** runcommand bookmarks
312 *** runcommand bookmarks
312 bm1 1:d3a0a68be6de
313 bm1 1:d3a0a68be6de
313 bm2 1:d3a0a68be6de
314 bm2 1:d3a0a68be6de
314 * bm3 2:aef17e88f5f0
315 * bm3 2:aef17e88f5f0
315
316
316
317
317 >>> import os
318 >>> import os
318 >>> from hgclient import check, readchannel, runcommand
319 >>> from hgclient import check, readchannel, runcommand
319 >>> @check
320 >>> @check
320 ... def tagscache(server):
321 ... def tagscache(server):
321 ... readchannel(server)
322 ... readchannel(server)
322 ... runcommand(server, ['id', '-t', '-r', '0'])
323 ... runcommand(server, ['id', '-t', '-r', '0'])
323 ... os.system('hg tag -r 0 foo')
324 ... os.system('hg tag -r 0 foo')
324 ... runcommand(server, ['id', '-t', '-r', '0'])
325 ... runcommand(server, ['id', '-t', '-r', '0'])
325 *** runcommand id -t -r 0
326 *** runcommand id -t -r 0
326
327
327 *** runcommand id -t -r 0
328 *** runcommand id -t -r 0
328 foo
329 foo
329
330
330 >>> import os
331 >>> import os
331 >>> from hgclient import check, readchannel, runcommand
332 >>> from hgclient import check, readchannel, runcommand
332 >>> @check
333 >>> @check
333 ... def setphase(server):
334 ... def setphase(server):
334 ... readchannel(server)
335 ... readchannel(server)
335 ... runcommand(server, ['phase', '-r', '.'])
336 ... runcommand(server, ['phase', '-r', '.'])
336 ... os.system('hg phase -r . -p')
337 ... os.system('hg phase -r . -p')
337 ... runcommand(server, ['phase', '-r', '.'])
338 ... runcommand(server, ['phase', '-r', '.'])
338 *** runcommand phase -r .
339 *** runcommand phase -r .
339 3: draft
340 3: draft
340 *** runcommand phase -r .
341 *** runcommand phase -r .
341 3: public
342 3: public
342
343
343 $ echo a >> a
344 $ echo a >> a
344 >>> from hgclient import check, readchannel, runcommand
345 >>> from hgclient import check, readchannel, runcommand
345 >>> @check
346 >>> @check
346 ... def rollback(server):
347 ... def rollback(server):
347 ... readchannel(server)
348 ... readchannel(server)
348 ... runcommand(server, ['phase', '-r', '.', '-p'])
349 ... runcommand(server, ['phase', '-r', '.', '-p'])
349 ... runcommand(server, ['commit', '-Am.'])
350 ... runcommand(server, ['commit', '-Am.'])
350 ... runcommand(server, ['rollback'])
351 ... runcommand(server, ['rollback'])
351 ... runcommand(server, ['phase', '-r', '.'])
352 ... runcommand(server, ['phase', '-r', '.'])
352 ... print('')
353 ... print('')
353 *** runcommand phase -r . -p
354 *** runcommand phase -r . -p
354 no phases changed
355 no phases changed
355 *** runcommand commit -Am.
356 *** runcommand commit -Am.
356 *** runcommand rollback
357 *** runcommand rollback
357 repository tip rolled back to revision 3 (undo commit)
358 repository tip rolled back to revision 3 (undo commit)
358 working directory now based on revision 3
359 working directory now based on revision 3
359 *** runcommand phase -r .
360 *** runcommand phase -r .
360 3: public
361 3: public
361
362
362
363
363 >>> import os
364 >>> import os
364 >>> from hgclient import check, readchannel, runcommand
365 >>> from hgclient import check, readchannel, runcommand
365 >>> @check
366 >>> @check
366 ... def branch(server):
367 ... def branch(server):
367 ... readchannel(server)
368 ... readchannel(server)
368 ... runcommand(server, ['branch'])
369 ... runcommand(server, ['branch'])
369 ... os.system('hg branch foo')
370 ... os.system('hg branch foo')
370 ... runcommand(server, ['branch'])
371 ... runcommand(server, ['branch'])
371 ... os.system('hg branch default')
372 ... os.system('hg branch default')
372 *** runcommand branch
373 *** runcommand branch
373 default
374 default
374 marked working directory as branch foo
375 marked working directory as branch foo
375 (branches are permanent and global, did you want a bookmark?)
376 (branches are permanent and global, did you want a bookmark?)
376 *** runcommand branch
377 *** runcommand branch
377 foo
378 foo
378 marked working directory as branch default
379 marked working directory as branch default
379 (branches are permanent and global, did you want a bookmark?)
380 (branches are permanent and global, did you want a bookmark?)
380
381
381 $ touch .hgignore
382 $ touch .hgignore
382 >>> import os
383 >>> import os
383 >>> from hgclient import check, readchannel, runcommand
384 >>> from hgclient import check, readchannel, runcommand
384 >>> @check
385 >>> @check
385 ... def hgignore(server):
386 ... def hgignore(server):
386 ... readchannel(server)
387 ... readchannel(server)
387 ... runcommand(server, ['commit', '-Am.'])
388 ... runcommand(server, ['commit', '-Am.'])
388 ... f = open('ignored-file', 'ab')
389 ... f = open('ignored-file', 'ab')
389 ... f.write('')
390 ... f.write('')
390 ... f.close()
391 ... f.close()
391 ... f = open('.hgignore', 'ab')
392 ... f = open('.hgignore', 'ab')
392 ... f.write('ignored-file')
393 ... f.write('ignored-file')
393 ... f.close()
394 ... f.close()
394 ... runcommand(server, ['status', '-i', '-u'])
395 ... runcommand(server, ['status', '-i', '-u'])
395 ... print('')
396 ... print('')
396 *** runcommand commit -Am.
397 *** runcommand commit -Am.
397 adding .hgignore
398 adding .hgignore
398 *** runcommand status -i -u
399 *** runcommand status -i -u
399 I ignored-file
400 I ignored-file
400
401
401
402
402 cache of non-public revisions should be invalidated on repository change
403 cache of non-public revisions should be invalidated on repository change
403 (issue4855):
404 (issue4855):
404
405
405 >>> import os
406 >>> import os
406 >>> from hgclient import check, readchannel, runcommand
407 >>> from hgclient import check, readchannel, runcommand
407 >>> @check
408 >>> @check
408 ... def phasesetscacheaftercommit(server):
409 ... def phasesetscacheaftercommit(server):
409 ... readchannel(server)
410 ... readchannel(server)
410 ... # load _phasecache._phaserevs and _phasesets
411 ... # load _phasecache._phaserevs and _phasesets
411 ... runcommand(server, ['log', '-qr', 'draft()'])
412 ... runcommand(server, ['log', '-qr', 'draft()'])
412 ... # create draft commits by another process
413 ... # create draft commits by another process
413 ... for i in xrange(5, 7):
414 ... for i in xrange(5, 7):
414 ... f = open('a', 'ab')
415 ... f = open('a', 'ab')
415 ... f.seek(0, os.SEEK_END)
416 ... f.seek(0, os.SEEK_END)
416 ... f.write('a\n')
417 ... f.write('a\n')
417 ... f.close()
418 ... f.close()
418 ... os.system('hg commit -Aqm%d' % i)
419 ... os.system('hg commit -Aqm%d' % i)
419 ... # new commits should be listed as draft revisions
420 ... # new commits should be listed as draft revisions
420 ... runcommand(server, ['log', '-qr', 'draft()'])
421 ... runcommand(server, ['log', '-qr', 'draft()'])
421 ... print('')
422 ... print('')
422 *** runcommand log -qr draft()
423 *** runcommand log -qr draft()
423 4:7966c8e3734d
424 4:7966c8e3734d
424 *** runcommand log -qr draft()
425 *** runcommand log -qr draft()
425 4:7966c8e3734d
426 4:7966c8e3734d
426 5:41f6602d1c4f
427 5:41f6602d1c4f
427 6:10501e202c35
428 6:10501e202c35
428
429
429
430
430 >>> import os
431 >>> import os
431 >>> from hgclient import check, readchannel, runcommand
432 >>> from hgclient import check, readchannel, runcommand
432 >>> @check
433 >>> @check
433 ... def phasesetscacheafterstrip(server):
434 ... def phasesetscacheafterstrip(server):
434 ... readchannel(server)
435 ... readchannel(server)
435 ... # load _phasecache._phaserevs and _phasesets
436 ... # load _phasecache._phaserevs and _phasesets
436 ... runcommand(server, ['log', '-qr', 'draft()'])
437 ... runcommand(server, ['log', '-qr', 'draft()'])
437 ... # strip cached revisions by another process
438 ... # strip cached revisions by another process
438 ... os.system('hg --config extensions.strip= strip -q 5')
439 ... os.system('hg --config extensions.strip= strip -q 5')
439 ... # shouldn't abort by "unknown revision '6'"
440 ... # shouldn't abort by "unknown revision '6'"
440 ... runcommand(server, ['log', '-qr', 'draft()'])
441 ... runcommand(server, ['log', '-qr', 'draft()'])
441 ... print('')
442 ... print('')
442 *** runcommand log -qr draft()
443 *** runcommand log -qr draft()
443 4:7966c8e3734d
444 4:7966c8e3734d
444 5:41f6602d1c4f
445 5:41f6602d1c4f
445 6:10501e202c35
446 6:10501e202c35
446 *** runcommand log -qr draft()
447 *** runcommand log -qr draft()
447 4:7966c8e3734d
448 4:7966c8e3734d
448
449
449
450
450 cache of phase roots should be invalidated on strip (issue3827):
451 cache of phase roots should be invalidated on strip (issue3827):
451
452
452 >>> import os
453 >>> import os
453 >>> from hgclient import check, readchannel, runcommand, sep
454 >>> from hgclient import check, readchannel, runcommand, sep
454 >>> @check
455 >>> @check
455 ... def phasecacheafterstrip(server):
456 ... def phasecacheafterstrip(server):
456 ... readchannel(server)
457 ... readchannel(server)
457 ...
458 ...
458 ... # create new head, 5:731265503d86
459 ... # create new head, 5:731265503d86
459 ... runcommand(server, ['update', '-C', '0'])
460 ... runcommand(server, ['update', '-C', '0'])
460 ... f = open('a', 'ab')
461 ... f = open('a', 'ab')
461 ... f.write('a\n')
462 ... f.write('a\n')
462 ... f.close()
463 ... f.close()
463 ... runcommand(server, ['commit', '-Am.', 'a'])
464 ... runcommand(server, ['commit', '-Am.', 'a'])
464 ... runcommand(server, ['log', '-Gq'])
465 ... runcommand(server, ['log', '-Gq'])
465 ...
466 ...
466 ... # make it public; draft marker moves to 4:7966c8e3734d
467 ... # make it public; draft marker moves to 4:7966c8e3734d
467 ... runcommand(server, ['phase', '-p', '.'])
468 ... runcommand(server, ['phase', '-p', '.'])
468 ... # load _phasecache.phaseroots
469 ... # load _phasecache.phaseroots
469 ... runcommand(server, ['phase', '.'], outfilter=sep)
470 ... runcommand(server, ['phase', '.'], outfilter=sep)
470 ...
471 ...
471 ... # strip 1::4 outside server
472 ... # strip 1::4 outside server
472 ... os.system('hg -q --config extensions.mq= strip 1')
473 ... os.system('hg -q --config extensions.mq= strip 1')
473 ...
474 ...
474 ... # shouldn't raise "7966c8e3734d: no node!"
475 ... # shouldn't raise "7966c8e3734d: no node!"
475 ... runcommand(server, ['branches'])
476 ... runcommand(server, ['branches'])
476 *** runcommand update -C 0
477 *** runcommand update -C 0
477 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
478 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
478 (leaving bookmark bm3)
479 (leaving bookmark bm3)
479 *** runcommand commit -Am. a
480 *** runcommand commit -Am. a
480 created new head
481 created new head
481 *** runcommand log -Gq
482 *** runcommand log -Gq
482 @ 5:731265503d86
483 @ 5:731265503d86
483 |
484 |
484 | o 4:7966c8e3734d
485 | o 4:7966c8e3734d
485 | |
486 | |
486 | o 3:b9b85890c400
487 | o 3:b9b85890c400
487 | |
488 | |
488 | o 2:aef17e88f5f0
489 | o 2:aef17e88f5f0
489 | |
490 | |
490 | o 1:d3a0a68be6de
491 | o 1:d3a0a68be6de
491 |/
492 |/
492 o 0:eff892de26ec
493 o 0:eff892de26ec
493
494
494 *** runcommand phase -p .
495 *** runcommand phase -p .
495 *** runcommand phase .
496 *** runcommand phase .
496 5: public
497 5: public
497 *** runcommand branches
498 *** runcommand branches
498 default 1:731265503d86
499 default 1:731265503d86
499
500
500 in-memory cache must be reloaded if transaction is aborted. otherwise
501 in-memory cache must be reloaded if transaction is aborted. otherwise
501 changelog and manifest would have invalid node:
502 changelog and manifest would have invalid node:
502
503
503 $ echo a >> a
504 $ echo a >> a
504 >>> from hgclient import check, readchannel, runcommand
505 >>> from hgclient import check, readchannel, runcommand
505 >>> @check
506 >>> @check
506 ... def txabort(server):
507 ... def txabort(server):
507 ... readchannel(server)
508 ... readchannel(server)
508 ... runcommand(server, ['commit', '--config', 'hooks.pretxncommit=false',
509 ... runcommand(server, ['commit', '--config', 'hooks.pretxncommit=false',
509 ... '-mfoo'])
510 ... '-mfoo'])
510 ... runcommand(server, ['verify'])
511 ... runcommand(server, ['verify'])
511 *** runcommand commit --config hooks.pretxncommit=false -mfoo
512 *** runcommand commit --config hooks.pretxncommit=false -mfoo
512 transaction abort!
513 transaction abort!
513 rollback completed
514 rollback completed
514 abort: pretxncommit hook exited with status 1
515 abort: pretxncommit hook exited with status 1
515 [255]
516 [255]
516 *** runcommand verify
517 *** runcommand verify
517 checking changesets
518 checking changesets
518 checking manifests
519 checking manifests
519 crosschecking files in changesets and manifests
520 crosschecking files in changesets and manifests
520 checking files
521 checking files
521 1 files, 2 changesets, 2 total revisions
522 1 files, 2 changesets, 2 total revisions
522 $ hg revert --no-backup -aq
523 $ hg revert --no-backup -aq
523
524
524 $ cat >> .hg/hgrc << EOF
525 $ cat >> .hg/hgrc << EOF
525 > [experimental]
526 > [experimental]
526 > evolution.createmarkers=True
527 > evolution.createmarkers=True
527 > EOF
528 > EOF
528
529
529 >>> import os
530 >>> import os
530 >>> from hgclient import check, readchannel, runcommand
531 >>> from hgclient import check, readchannel, runcommand
531 >>> @check
532 >>> @check
532 ... def obsolete(server):
533 ... def obsolete(server):
533 ... readchannel(server)
534 ... readchannel(server)
534 ...
535 ...
535 ... runcommand(server, ['up', 'null'])
536 ... runcommand(server, ['up', 'null'])
536 ... runcommand(server, ['phase', '-df', 'tip'])
537 ... runcommand(server, ['phase', '-df', 'tip'])
537 ... cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
538 ... cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
538 ... if os.name == 'nt':
539 ... if os.name == 'nt':
539 ... cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
540 ... cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
540 ... os.system(cmd)
541 ... os.system(cmd)
541 ... runcommand(server, ['log', '--hidden'])
542 ... runcommand(server, ['log', '--hidden'])
542 ... runcommand(server, ['log'])
543 ... runcommand(server, ['log'])
543 *** runcommand up null
544 *** runcommand up null
544 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
545 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
545 *** runcommand phase -df tip
546 *** runcommand phase -df tip
546 obsoleted 1 changesets
547 obsoleted 1 changesets
547 *** runcommand log --hidden
548 *** runcommand log --hidden
548 changeset: 1:731265503d86
549 changeset: 1:731265503d86
549 tag: tip
550 tag: tip
550 user: test
551 user: test
551 date: Thu Jan 01 00:00:00 1970 +0000
552 date: Thu Jan 01 00:00:00 1970 +0000
552 obsolete: pruned
553 obsolete: pruned
553 summary: .
554 summary: .
554
555
555 changeset: 0:eff892de26ec
556 changeset: 0:eff892de26ec
556 bookmark: bm1
557 bookmark: bm1
557 bookmark: bm2
558 bookmark: bm2
558 bookmark: bm3
559 bookmark: bm3
559 user: test
560 user: test
560 date: Thu Jan 01 00:00:00 1970 +0000
561 date: Thu Jan 01 00:00:00 1970 +0000
561 summary: 1
562 summary: 1
562
563
563 *** runcommand log
564 *** runcommand log
564 changeset: 0:eff892de26ec
565 changeset: 0:eff892de26ec
565 bookmark: bm1
566 bookmark: bm1
566 bookmark: bm2
567 bookmark: bm2
567 bookmark: bm3
568 bookmark: bm3
568 tag: tip
569 tag: tip
569 user: test
570 user: test
570 date: Thu Jan 01 00:00:00 1970 +0000
571 date: Thu Jan 01 00:00:00 1970 +0000
571 summary: 1
572 summary: 1
572
573
573
574
574 $ cat <<EOF >> .hg/hgrc
575 $ cat <<EOF >> .hg/hgrc
575 > [extensions]
576 > [extensions]
576 > mq =
577 > mq =
577 > EOF
578 > EOF
578
579
579 >>> import os
580 >>> import os
580 >>> from hgclient import check, readchannel, runcommand
581 >>> from hgclient import check, readchannel, runcommand
581 >>> @check
582 >>> @check
582 ... def mqoutsidechanges(server):
583 ... def mqoutsidechanges(server):
583 ... readchannel(server)
584 ... readchannel(server)
584 ...
585 ...
585 ... # load repo.mq
586 ... # load repo.mq
586 ... runcommand(server, ['qapplied'])
587 ... runcommand(server, ['qapplied'])
587 ... os.system('hg qnew 0.diff')
588 ... os.system('hg qnew 0.diff')
588 ... # repo.mq should be invalidated
589 ... # repo.mq should be invalidated
589 ... runcommand(server, ['qapplied'])
590 ... runcommand(server, ['qapplied'])
590 ...
591 ...
591 ... runcommand(server, ['qpop', '--all'])
592 ... runcommand(server, ['qpop', '--all'])
592 ... os.system('hg qqueue --create foo')
593 ... os.system('hg qqueue --create foo')
593 ... # repo.mq should be recreated to point to new queue
594 ... # repo.mq should be recreated to point to new queue
594 ... runcommand(server, ['qqueue', '--active'])
595 ... runcommand(server, ['qqueue', '--active'])
595 *** runcommand qapplied
596 *** runcommand qapplied
596 *** runcommand qapplied
597 *** runcommand qapplied
597 0.diff
598 0.diff
598 *** runcommand qpop --all
599 *** runcommand qpop --all
599 popping 0.diff
600 popping 0.diff
600 patch queue now empty
601 patch queue now empty
601 *** runcommand qqueue --active
602 *** runcommand qqueue --active
602 foo
603 foo
603
604
604 $ cat <<EOF > dbgui.py
605 $ cat <<EOF > dbgui.py
605 > import os
606 > import os
606 > import sys
607 > import sys
607 > from mercurial import commands, registrar
608 > from mercurial import commands, registrar
608 > cmdtable = {}
609 > cmdtable = {}
609 > command = registrar.command(cmdtable)
610 > command = registrar.command(cmdtable)
610 > @command(b"debuggetpass", norepo=True)
611 > @command(b"debuggetpass", norepo=True)
611 > def debuggetpass(ui):
612 > def debuggetpass(ui):
612 > ui.write("%s\\n" % ui.getpass())
613 > ui.write("%s\\n" % ui.getpass())
613 > @command(b"debugprompt", norepo=True)
614 > @command(b"debugprompt", norepo=True)
614 > def debugprompt(ui):
615 > def debugprompt(ui):
615 > ui.write("%s\\n" % ui.prompt("prompt:"))
616 > ui.write("%s\\n" % ui.prompt("prompt:"))
616 > @command(b"debugreadstdin", norepo=True)
617 > @command(b"debugreadstdin", norepo=True)
617 > def debugreadstdin(ui):
618 > def debugreadstdin(ui):
618 > ui.write("read: %r\n" % sys.stdin.read(1))
619 > ui.write("read: %r\n" % sys.stdin.read(1))
619 > @command(b"debugwritestdout", norepo=True)
620 > @command(b"debugwritestdout", norepo=True)
620 > def debugwritestdout(ui):
621 > def debugwritestdout(ui):
621 > os.write(1, "low-level stdout fd and\n")
622 > os.write(1, "low-level stdout fd and\n")
622 > sys.stdout.write("stdout should be redirected to /dev/null\n")
623 > sys.stdout.write("stdout should be redirected to /dev/null\n")
623 > sys.stdout.flush()
624 > sys.stdout.flush()
624 > EOF
625 > EOF
625 $ cat <<EOF >> .hg/hgrc
626 $ cat <<EOF >> .hg/hgrc
626 > [extensions]
627 > [extensions]
627 > dbgui = dbgui.py
628 > dbgui = dbgui.py
628 > EOF
629 > EOF
629
630
630 >>> from hgclient import check, readchannel, runcommand, stringio
631 >>> from hgclient import check, readchannel, runcommand, stringio
631 >>> @check
632 >>> @check
632 ... def getpass(server):
633 ... def getpass(server):
633 ... readchannel(server)
634 ... readchannel(server)
634 ... runcommand(server, ['debuggetpass', '--config',
635 ... runcommand(server, ['debuggetpass', '--config',
635 ... 'ui.interactive=True'],
636 ... 'ui.interactive=True'],
636 ... input=stringio('1234\n'))
637 ... input=stringio('1234\n'))
637 ... runcommand(server, ['debuggetpass', '--config',
638 ... runcommand(server, ['debuggetpass', '--config',
638 ... 'ui.interactive=True'],
639 ... 'ui.interactive=True'],
639 ... input=stringio('\n'))
640 ... input=stringio('\n'))
640 ... runcommand(server, ['debuggetpass', '--config',
641 ... runcommand(server, ['debuggetpass', '--config',
641 ... 'ui.interactive=True'],
642 ... 'ui.interactive=True'],
642 ... input=stringio(''))
643 ... input=stringio(''))
643 ... runcommand(server, ['debugprompt', '--config',
644 ... runcommand(server, ['debugprompt', '--config',
644 ... 'ui.interactive=True'],
645 ... 'ui.interactive=True'],
645 ... input=stringio('5678\n'))
646 ... input=stringio('5678\n'))
646 ... runcommand(server, ['debugreadstdin'])
647 ... runcommand(server, ['debugreadstdin'])
647 ... runcommand(server, ['debugwritestdout'])
648 ... runcommand(server, ['debugwritestdout'])
648 *** runcommand debuggetpass --config ui.interactive=True
649 *** runcommand debuggetpass --config ui.interactive=True
649 password: 1234
650 password: 1234
650 *** runcommand debuggetpass --config ui.interactive=True
651 *** runcommand debuggetpass --config ui.interactive=True
651 password:
652 password:
652 *** runcommand debuggetpass --config ui.interactive=True
653 *** runcommand debuggetpass --config ui.interactive=True
653 password: abort: response expected
654 password: abort: response expected
654 [255]
655 [255]
655 *** runcommand debugprompt --config ui.interactive=True
656 *** runcommand debugprompt --config ui.interactive=True
656 prompt: 5678
657 prompt: 5678
657 *** runcommand debugreadstdin
658 *** runcommand debugreadstdin
658 read: ''
659 read: ''
659 *** runcommand debugwritestdout
660 *** runcommand debugwritestdout
660
661
661
662
662 run commandserver in commandserver, which is silly but should work:
663 run commandserver in commandserver, which is silly but should work:
663
664
664 >>> from __future__ import print_function
665 >>> from __future__ import print_function
665 >>> from hgclient import check, readchannel, runcommand, stringio
666 >>> from hgclient import check, readchannel, runcommand, stringio
666 >>> @check
667 >>> @check
667 ... def nested(server):
668 ... def nested(server):
668 ... print('%c, %r' % readchannel(server))
669 ... print('%c, %r' % readchannel(server))
669 ... class nestedserver(object):
670 ... class nestedserver(object):
670 ... stdin = stringio('getencoding\n')
671 ... stdin = stringio('getencoding\n')
671 ... stdout = stringio()
672 ... stdout = stringio()
672 ... runcommand(server, ['serve', '--cmdserver', 'pipe'],
673 ... runcommand(server, ['serve', '--cmdserver', 'pipe'],
673 ... output=nestedserver.stdout, input=nestedserver.stdin)
674 ... output=nestedserver.stdout, input=nestedserver.stdin)
674 ... nestedserver.stdout.seek(0)
675 ... nestedserver.stdout.seek(0)
675 ... print('%c, %r' % readchannel(nestedserver)) # hello
676 ... print('%c, %r' % readchannel(nestedserver)) # hello
676 ... print('%c, %r' % readchannel(nestedserver)) # getencoding
677 ... print('%c, %r' % readchannel(nestedserver)) # getencoding
677 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
678 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
678 *** runcommand serve --cmdserver pipe
679 *** runcommand serve --cmdserver pipe
679 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
680 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
680 r, '*' (glob)
681 r, '*' (glob)
681
682
682
683
683 start without repository:
684 start without repository:
684
685
685 $ cd ..
686 $ cd ..
686
687
687 >>> from __future__ import print_function
688 >>> from __future__ import print_function
688 >>> from hgclient import check, readchannel, runcommand
689 >>> from hgclient import check, readchannel, runcommand
689 >>> @check
690 >>> @check
690 ... def hellomessage(server):
691 ... def hellomessage(server):
691 ... ch, data = readchannel(server)
692 ... ch, data = readchannel(server)
692 ... print('%c, %r' % (ch, data))
693 ... print('%c, %r' % (ch, data))
693 ... # run an arbitrary command to make sure the next thing the server
694 ... # run an arbitrary command to make sure the next thing the server
694 ... # sends isn't part of the hello message
695 ... # sends isn't part of the hello message
695 ... runcommand(server, ['id'])
696 ... runcommand(server, ['id'])
696 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
697 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
697 *** runcommand id
698 *** runcommand id
698 abort: there is no Mercurial repository here (.hg not found)
699 abort: there is no Mercurial repository here (.hg not found)
699 [255]
700 [255]
700
701
701 >>> from hgclient import check, readchannel, runcommand
702 >>> from hgclient import check, readchannel, runcommand
702 >>> @check
703 >>> @check
703 ... def startwithoutrepo(server):
704 ... def startwithoutrepo(server):
704 ... readchannel(server)
705 ... readchannel(server)
705 ... runcommand(server, ['init', 'repo2'])
706 ... runcommand(server, ['init', 'repo2'])
706 ... runcommand(server, ['id', '-R', 'repo2'])
707 ... runcommand(server, ['id', '-R', 'repo2'])
707 *** runcommand init repo2
708 *** runcommand init repo2
708 *** runcommand id -R repo2
709 *** runcommand id -R repo2
709 000000000000 tip
710 000000000000 tip
710
711
711
712
712 don't fall back to cwd if invalid -R path is specified (issue4805):
713 don't fall back to cwd if invalid -R path is specified (issue4805):
713
714
714 $ cd repo
715 $ cd repo
715 $ hg serve --cmdserver pipe -R ../nonexistent
716 $ hg serve --cmdserver pipe -R ../nonexistent
716 abort: repository ../nonexistent not found!
717 abort: repository ../nonexistent not found!
717 [255]
718 [255]
718 $ cd ..
719 $ cd ..
719
720
720
721
721 unix domain socket:
722 unix domain socket:
722
723
723 $ cd repo
724 $ cd repo
724 $ hg update -q
725 $ hg update -q
725
726
726 #if unix-socket unix-permissions
727 #if unix-socket unix-permissions
727
728
728 >>> from __future__ import print_function
729 >>> from __future__ import print_function
729 >>> from hgclient import check, readchannel, runcommand, stringio, unixserver
730 >>> from hgclient import check, readchannel, runcommand, stringio, unixserver
730 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
731 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
731 >>> def hellomessage(conn):
732 >>> def hellomessage(conn):
732 ... ch, data = readchannel(conn)
733 ... ch, data = readchannel(conn)
733 ... print('%c, %r' % (ch, data))
734 ... print('%c, %r' % (ch, data))
734 ... runcommand(conn, ['id'])
735 ... runcommand(conn, ['id'])
735 >>> check(hellomessage, server.connect)
736 >>> check(hellomessage, server.connect)
736 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
737 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
737 *** runcommand id
738 *** runcommand id
738 eff892de26ec tip bm1/bm2/bm3
739 eff892de26ec tip bm1/bm2/bm3
739 >>> def unknowncommand(conn):
740 >>> def unknowncommand(conn):
740 ... readchannel(conn)
741 ... readchannel(conn)
741 ... conn.stdin.write('unknowncommand\n')
742 ... conn.stdin.write('unknowncommand\n')
742 >>> check(unknowncommand, server.connect) # error sent to server.log
743 >>> check(unknowncommand, server.connect) # error sent to server.log
743 >>> def serverinput(conn):
744 >>> def serverinput(conn):
744 ... readchannel(conn)
745 ... readchannel(conn)
745 ... patch = """
746 ... patch = """
746 ... # HG changeset patch
747 ... # HG changeset patch
747 ... # User test
748 ... # User test
748 ... # Date 0 0
749 ... # Date 0 0
749 ... 2
750 ... 2
750 ...
751 ...
751 ... diff -r eff892de26ec -r 1ed24be7e7a0 a
752 ... diff -r eff892de26ec -r 1ed24be7e7a0 a
752 ... --- a/a
753 ... --- a/a
753 ... +++ b/a
754 ... +++ b/a
754 ... @@ -1,1 +1,2 @@
755 ... @@ -1,1 +1,2 @@
755 ... 1
756 ... 1
756 ... +2
757 ... +2
757 ... """
758 ... """
758 ... runcommand(conn, ['import', '-'], input=stringio(patch))
759 ... runcommand(conn, ['import', '-'], input=stringio(patch))
759 ... runcommand(conn, ['log', '-rtip', '-q'])
760 ... runcommand(conn, ['log', '-rtip', '-q'])
760 >>> check(serverinput, server.connect)
761 >>> check(serverinput, server.connect)
761 *** runcommand import -
762 *** runcommand import -
762 applying patch from stdin
763 applying patch from stdin
763 *** runcommand log -rtip -q
764 *** runcommand log -rtip -q
764 2:1ed24be7e7a0
765 2:1ed24be7e7a0
765 >>> server.shutdown()
766 >>> server.shutdown()
766
767
767 $ cat .hg/server.log
768 $ cat .hg/server.log
768 listening at .hg/server.sock
769 listening at .hg/server.sock
769 abort: unknown command unknowncommand
770 abort: unknown command unknowncommand
770 killed!
771 killed!
771 $ rm .hg/server.log
772 $ rm .hg/server.log
772
773
773 if server crashed before hello, traceback will be sent to 'e' channel as
774 if server crashed before hello, traceback will be sent to 'e' channel as
774 last ditch:
775 last ditch:
775
776
776 $ cat <<EOF >> .hg/hgrc
777 $ cat <<EOF >> .hg/hgrc
777 > [cmdserver]
778 > [cmdserver]
778 > log = inexistent/path.log
779 > log = inexistent/path.log
779 > EOF
780 > EOF
780 >>> from __future__ import print_function
781 >>> from __future__ import print_function
781 >>> from hgclient import check, readchannel, unixserver
782 >>> from hgclient import check, readchannel, unixserver
782 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
783 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
783 >>> def earlycrash(conn):
784 >>> def earlycrash(conn):
784 ... while True:
785 ... while True:
785 ... try:
786 ... try:
786 ... ch, data = readchannel(conn)
787 ... ch, data = readchannel(conn)
787 ... if not data.startswith(' '):
788 ... if not data.startswith(' '):
788 ... print('%c, %r' % (ch, data))
789 ... print('%c, %r' % (ch, data))
789 ... except EOFError:
790 ... except EOFError:
790 ... break
791 ... break
791 >>> check(earlycrash, server.connect)
792 >>> check(earlycrash, server.connect)
792 e, 'Traceback (most recent call last):\n'
793 e, 'Traceback (most recent call last):\n'
793 e, "IOError: *" (glob)
794 e, "IOError: *" (glob)
794 >>> server.shutdown()
795 >>> server.shutdown()
795
796
796 $ cat .hg/server.log | grep -v '^ '
797 $ cat .hg/server.log | grep -v '^ '
797 listening at .hg/server.sock
798 listening at .hg/server.sock
798 Traceback (most recent call last):
799 Traceback (most recent call last):
799 IOError: * (glob)
800 IOError: * (glob)
800 killed!
801 killed!
801 #endif
802 #endif
802 #if no-unix-socket
803 #if no-unix-socket
803
804
804 $ hg serve --cmdserver unix -a .hg/server.sock
805 $ hg serve --cmdserver unix -a .hg/server.sock
805 abort: unsupported platform
806 abort: unsupported platform
806 [255]
807 [255]
807
808
808 #endif
809 #endif
809
810
810 $ cd ..
811 $ cd ..
811
812
812 Test that accessing to invalid changelog cache is avoided at
813 Test that accessing to invalid changelog cache is avoided at
813 subsequent operations even if repo object is reused even after failure
814 subsequent operations even if repo object is reused even after failure
814 of transaction (see 0a7610758c42 also)
815 of transaction (see 0a7610758c42 also)
815
816
816 "hg log" after failure of transaction is needed to detect invalid
817 "hg log" after failure of transaction is needed to detect invalid
817 cache in repoview: this can't detect by "hg verify" only.
818 cache in repoview: this can't detect by "hg verify" only.
818
819
819 Combination of "finalization" and "empty-ness of changelog" (2 x 2 =
820 Combination of "finalization" and "empty-ness of changelog" (2 x 2 =
820 4) are tested, because '00changelog.i' are differently changed in each
821 4) are tested, because '00changelog.i' are differently changed in each
821 cases.
822 cases.
822
823
823 $ cat > $TESTTMP/failafterfinalize.py <<EOF
824 $ cat > $TESTTMP/failafterfinalize.py <<EOF
824 > # extension to abort transaction after finalization forcibly
825 > # extension to abort transaction after finalization forcibly
825 > from mercurial import commands, error, extensions, lock as lockmod
826 > from mercurial import commands, error, extensions, lock as lockmod
826 > from mercurial import registrar
827 > from mercurial import registrar
827 > cmdtable = {}
828 > cmdtable = {}
828 > command = registrar.command(cmdtable)
829 > command = registrar.command(cmdtable)
829 > configtable = {}
830 > configtable = {}
830 > configitem = registrar.configitem(configtable)
831 > configitem = registrar.configitem(configtable)
831 > configitem('failafterfinalize', 'fail',
832 > configitem('failafterfinalize', 'fail',
832 > default=None,
833 > default=None,
833 > )
834 > )
834 > def fail(tr):
835 > def fail(tr):
835 > raise error.Abort('fail after finalization')
836 > raise error.Abort('fail after finalization')
836 > def reposetup(ui, repo):
837 > def reposetup(ui, repo):
837 > class failrepo(repo.__class__):
838 > class failrepo(repo.__class__):
838 > def commitctx(self, ctx, error=False):
839 > def commitctx(self, ctx, error=False):
839 > if self.ui.configbool('failafterfinalize', 'fail'):
840 > if self.ui.configbool('failafterfinalize', 'fail'):
840 > # 'sorted()' by ASCII code on category names causes
841 > # 'sorted()' by ASCII code on category names causes
841 > # invoking 'fail' after finalization of changelog
842 > # invoking 'fail' after finalization of changelog
842 > # using "'cl-%i' % id(self)" as category name
843 > # using "'cl-%i' % id(self)" as category name
843 > self.currenttransaction().addfinalize('zzzzzzzz', fail)
844 > self.currenttransaction().addfinalize('zzzzzzzz', fail)
844 > return super(failrepo, self).commitctx(ctx, error)
845 > return super(failrepo, self).commitctx(ctx, error)
845 > repo.__class__ = failrepo
846 > repo.__class__ = failrepo
846 > EOF
847 > EOF
847
848
848 $ hg init repo3
849 $ hg init repo3
849 $ cd repo3
850 $ cd repo3
850
851
851 $ cat <<EOF >> $HGRCPATH
852 $ cat <<EOF >> $HGRCPATH
852 > [ui]
853 > [ui]
853 > logtemplate = {rev} {desc|firstline} ({files})\n
854 > logtemplate = {rev} {desc|firstline} ({files})\n
854 >
855 >
855 > [extensions]
856 > [extensions]
856 > failafterfinalize = $TESTTMP/failafterfinalize.py
857 > failafterfinalize = $TESTTMP/failafterfinalize.py
857 > EOF
858 > EOF
858
859
859 - test failure with "empty changelog"
860 - test failure with "empty changelog"
860
861
861 $ echo foo > foo
862 $ echo foo > foo
862 $ hg add foo
863 $ hg add foo
863
864
864 (failure before finalization)
865 (failure before finalization)
865
866
866 >>> from hgclient import check, readchannel, runcommand
867 >>> from hgclient import check, readchannel, runcommand
867 >>> @check
868 >>> @check
868 ... def abort(server):
869 ... def abort(server):
869 ... readchannel(server)
870 ... readchannel(server)
870 ... runcommand(server, ['commit',
871 ... runcommand(server, ['commit',
871 ... '--config', 'hooks.pretxncommit=false',
872 ... '--config', 'hooks.pretxncommit=false',
872 ... '-mfoo'])
873 ... '-mfoo'])
873 ... runcommand(server, ['log'])
874 ... runcommand(server, ['log'])
874 ... runcommand(server, ['verify', '-q'])
875 ... runcommand(server, ['verify', '-q'])
875 *** runcommand commit --config hooks.pretxncommit=false -mfoo
876 *** runcommand commit --config hooks.pretxncommit=false -mfoo
876 transaction abort!
877 transaction abort!
877 rollback completed
878 rollback completed
878 abort: pretxncommit hook exited with status 1
879 abort: pretxncommit hook exited with status 1
879 [255]
880 [255]
880 *** runcommand log
881 *** runcommand log
881 *** runcommand verify -q
882 *** runcommand verify -q
882
883
883 (failure after finalization)
884 (failure after finalization)
884
885
885 >>> from hgclient import check, readchannel, runcommand
886 >>> from hgclient import check, readchannel, runcommand
886 >>> @check
887 >>> @check
887 ... def abort(server):
888 ... def abort(server):
888 ... readchannel(server)
889 ... readchannel(server)
889 ... runcommand(server, ['commit',
890 ... runcommand(server, ['commit',
890 ... '--config', 'failafterfinalize.fail=true',
891 ... '--config', 'failafterfinalize.fail=true',
891 ... '-mfoo'])
892 ... '-mfoo'])
892 ... runcommand(server, ['log'])
893 ... runcommand(server, ['log'])
893 ... runcommand(server, ['verify', '-q'])
894 ... runcommand(server, ['verify', '-q'])
894 *** runcommand commit --config failafterfinalize.fail=true -mfoo
895 *** runcommand commit --config failafterfinalize.fail=true -mfoo
895 transaction abort!
896 transaction abort!
896 rollback completed
897 rollback completed
897 abort: fail after finalization
898 abort: fail after finalization
898 [255]
899 [255]
899 *** runcommand log
900 *** runcommand log
900 *** runcommand verify -q
901 *** runcommand verify -q
901
902
902 - test failure with "not-empty changelog"
903 - test failure with "not-empty changelog"
903
904
904 $ echo bar > bar
905 $ echo bar > bar
905 $ hg add bar
906 $ hg add bar
906 $ hg commit -mbar bar
907 $ hg commit -mbar bar
907
908
908 (failure before finalization)
909 (failure before finalization)
909
910
910 >>> from hgclient import check, readchannel, runcommand
911 >>> from hgclient import check, readchannel, runcommand
911 >>> @check
912 >>> @check
912 ... def abort(server):
913 ... def abort(server):
913 ... readchannel(server)
914 ... readchannel(server)
914 ... runcommand(server, ['commit',
915 ... runcommand(server, ['commit',
915 ... '--config', 'hooks.pretxncommit=false',
916 ... '--config', 'hooks.pretxncommit=false',
916 ... '-mfoo', 'foo'])
917 ... '-mfoo', 'foo'])
917 ... runcommand(server, ['log'])
918 ... runcommand(server, ['log'])
918 ... runcommand(server, ['verify', '-q'])
919 ... runcommand(server, ['verify', '-q'])
919 *** runcommand commit --config hooks.pretxncommit=false -mfoo foo
920 *** runcommand commit --config hooks.pretxncommit=false -mfoo foo
920 transaction abort!
921 transaction abort!
921 rollback completed
922 rollback completed
922 abort: pretxncommit hook exited with status 1
923 abort: pretxncommit hook exited with status 1
923 [255]
924 [255]
924 *** runcommand log
925 *** runcommand log
925 0 bar (bar)
926 0 bar (bar)
926 *** runcommand verify -q
927 *** runcommand verify -q
927
928
928 (failure after finalization)
929 (failure after finalization)
929
930
930 >>> from hgclient import check, readchannel, runcommand
931 >>> from hgclient import check, readchannel, runcommand
931 >>> @check
932 >>> @check
932 ... def abort(server):
933 ... def abort(server):
933 ... readchannel(server)
934 ... readchannel(server)
934 ... runcommand(server, ['commit',
935 ... runcommand(server, ['commit',
935 ... '--config', 'failafterfinalize.fail=true',
936 ... '--config', 'failafterfinalize.fail=true',
936 ... '-mfoo', 'foo'])
937 ... '-mfoo', 'foo'])
937 ... runcommand(server, ['log'])
938 ... runcommand(server, ['log'])
938 ... runcommand(server, ['verify', '-q'])
939 ... runcommand(server, ['verify', '-q'])
939 *** runcommand commit --config failafterfinalize.fail=true -mfoo foo
940 *** runcommand commit --config failafterfinalize.fail=true -mfoo foo
940 transaction abort!
941 transaction abort!
941 rollback completed
942 rollback completed
942 abort: fail after finalization
943 abort: fail after finalization
943 [255]
944 [255]
944 *** runcommand log
945 *** runcommand log
945 0 bar (bar)
946 0 bar (bar)
946 *** runcommand verify -q
947 *** runcommand verify -q
947
948
948 $ cd ..
949 $ cd ..
949
950
950 Test symlink traversal over cached audited paths:
951 Test symlink traversal over cached audited paths:
951 -------------------------------------------------
952 -------------------------------------------------
952
953
953 #if symlink
954 #if symlink
954
955
955 set up symlink hell
956 set up symlink hell
956
957
957 $ mkdir merge-symlink-out
958 $ mkdir merge-symlink-out
958 $ hg init merge-symlink
959 $ hg init merge-symlink
959 $ cd merge-symlink
960 $ cd merge-symlink
960 $ touch base
961 $ touch base
961 $ hg commit -qAm base
962 $ hg commit -qAm base
962 $ ln -s ../merge-symlink-out a
963 $ ln -s ../merge-symlink-out a
963 $ hg commit -qAm 'symlink a -> ../merge-symlink-out'
964 $ hg commit -qAm 'symlink a -> ../merge-symlink-out'
964 $ hg up -q 0
965 $ hg up -q 0
965 $ mkdir a
966 $ mkdir a
966 $ touch a/poisoned
967 $ touch a/poisoned
967 $ hg commit -qAm 'file a/poisoned'
968 $ hg commit -qAm 'file a/poisoned'
968 $ hg log -G -T '{rev}: {desc}\n'
969 $ hg log -G -T '{rev}: {desc}\n'
969 @ 2: file a/poisoned
970 @ 2: file a/poisoned
970 |
971 |
971 | o 1: symlink a -> ../merge-symlink-out
972 | o 1: symlink a -> ../merge-symlink-out
972 |/
973 |/
973 o 0: base
974 o 0: base
974
975
975
976
976 try trivial merge after update: cache of audited paths should be discarded,
977 try trivial merge after update: cache of audited paths should be discarded,
977 and the merge should fail (issue5628)
978 and the merge should fail (issue5628)
978
979
979 $ hg up -q null
980 $ hg up -q null
980 >>> from hgclient import check, readchannel, runcommand
981 >>> from hgclient import check, readchannel, runcommand
981 >>> @check
982 >>> @check
982 ... def merge(server):
983 ... def merge(server):
983 ... readchannel(server)
984 ... readchannel(server)
984 ... # audit a/poisoned as a good path
985 ... # audit a/poisoned as a good path
985 ... runcommand(server, ['up', '-qC', '2'])
986 ... runcommand(server, ['up', '-qC', '2'])
986 ... runcommand(server, ['up', '-qC', '1'])
987 ... runcommand(server, ['up', '-qC', '1'])
987 ... # here a is a symlink, so a/poisoned is bad
988 ... # here a is a symlink, so a/poisoned is bad
988 ... runcommand(server, ['merge', '2'])
989 ... runcommand(server, ['merge', '2'])
989 *** runcommand up -qC 2
990 *** runcommand up -qC 2
990 *** runcommand up -qC 1
991 *** runcommand up -qC 1
991 *** runcommand merge 2
992 *** runcommand merge 2
992 abort: path 'a/poisoned' traverses symbolic link 'a'
993 abort: path 'a/poisoned' traverses symbolic link 'a'
993 [255]
994 [255]
994 $ ls ../merge-symlink-out
995 $ ls ../merge-symlink-out
995
996
996 cache of repo.auditor should be discarded, so matcher would never traverse
997 cache of repo.auditor should be discarded, so matcher would never traverse
997 symlinks:
998 symlinks:
998
999
999 $ hg up -qC 0
1000 $ hg up -qC 0
1000 $ touch ../merge-symlink-out/poisoned
1001 $ touch ../merge-symlink-out/poisoned
1001 >>> from hgclient import check, readchannel, runcommand
1002 >>> from hgclient import check, readchannel, runcommand
1002 >>> @check
1003 >>> @check
1003 ... def files(server):
1004 ... def files(server):
1004 ... readchannel(server)
1005 ... readchannel(server)
1005 ... runcommand(server, ['up', '-qC', '2'])
1006 ... runcommand(server, ['up', '-qC', '2'])
1006 ... # audit a/poisoned as a good path
1007 ... # audit a/poisoned as a good path
1007 ... runcommand(server, ['files', 'a/poisoned'])
1008 ... runcommand(server, ['files', 'a/poisoned'])
1008 ... runcommand(server, ['up', '-qC', '0'])
1009 ... runcommand(server, ['up', '-qC', '0'])
1009 ... runcommand(server, ['up', '-qC', '1'])
1010 ... runcommand(server, ['up', '-qC', '1'])
1010 ... # here 'a' is a symlink, so a/poisoned should be warned
1011 ... # here 'a' is a symlink, so a/poisoned should be warned
1011 ... runcommand(server, ['files', 'a/poisoned'])
1012 ... runcommand(server, ['files', 'a/poisoned'])
1012 *** runcommand up -qC 2
1013 *** runcommand up -qC 2
1013 *** runcommand files a/poisoned
1014 *** runcommand files a/poisoned
1014 a/poisoned
1015 a/poisoned
1015 *** runcommand up -qC 0
1016 *** runcommand up -qC 0
1016 *** runcommand up -qC 1
1017 *** runcommand up -qC 1
1017 *** runcommand files a/poisoned
1018 *** runcommand files a/poisoned
1018 abort: path 'a/poisoned' traverses symbolic link 'a'
1019 abort: path 'a/poisoned' traverses symbolic link 'a'
1019 [255]
1020 [255]
1020
1021
1021 $ cd ..
1022 $ cd ..
1022
1023
1023 #endif
1024 #endif
@@ -1,126 +1,132 b''
1 #require lfs-test-server
1 #require lfs-test-server
2
2
3 $ LFS_LISTEN="tcp://:$HGPORT"
3 $ LFS_LISTEN="tcp://:$HGPORT"
4 $ LFS_HOST="localhost:$HGPORT"
4 $ LFS_HOST="localhost:$HGPORT"
5 $ LFS_PUBLIC=1
5 $ LFS_PUBLIC=1
6 $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
6 $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
7 #if no-windows
7 #if no-windows
8 $ lfs-test-server &> lfs-server.log &
8 $ lfs-test-server &> lfs-server.log &
9 $ echo $! >> $DAEMON_PIDS
9 $ echo $! >> $DAEMON_PIDS
10 #else
10 #else
11 $ cat >> $TESTTMP/spawn.py <<EOF
11 $ cat >> $TESTTMP/spawn.py <<EOF
12 > import os
12 > import os
13 > import subprocess
13 > import subprocess
14 > import sys
14 > import sys
15 >
15 >
16 > for path in os.environ["PATH"].split(os.pathsep):
16 > for path in os.environ["PATH"].split(os.pathsep):
17 > exe = os.path.join(path, 'lfs-test-server.exe')
17 > exe = os.path.join(path, 'lfs-test-server.exe')
18 > if os.path.exists(exe):
18 > if os.path.exists(exe):
19 > with open('lfs-server.log', 'wb') as out:
19 > with open('lfs-server.log', 'wb') as out:
20 > p = subprocess.Popen(exe, stdout=out, stderr=out)
20 > p = subprocess.Popen(exe, stdout=out, stderr=out)
21 > sys.stdout.write('%s\n' % p.pid)
21 > sys.stdout.write('%s\n' % p.pid)
22 > sys.exit(0)
22 > sys.exit(0)
23 > sys.exit(1)
23 > sys.exit(1)
24 > EOF
24 > EOF
25 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
25 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
26 #endif
26 #endif
27
27
28 $ cat >> $HGRCPATH <<EOF
28 $ cat >> $HGRCPATH <<EOF
29 > [extensions]
29 > [extensions]
30 > lfs=
30 > lfs=
31 > [lfs]
31 > [lfs]
32 > url=http://foo:bar@$LFS_HOST/
32 > url=http://foo:bar@$LFS_HOST/
33 > threshold=1
33 > threshold=1
34 > EOF
34 > EOF
35
35
36 $ hg init repo1
36 $ hg init repo1
37 $ cd repo1
37 $ cd repo1
38 $ echo THIS-IS-LFS > a
38 $ echo THIS-IS-LFS > a
39 $ hg commit -m a -A a
39 $ hg commit -m a -A a
40
40
41 $ hg init ../repo2
41 $ hg init ../repo2
42 $ hg push ../repo2 -v
42 $ hg push ../repo2 -v
43 pushing to ../repo2
43 pushing to ../repo2
44 searching for changes
44 searching for changes
45 lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
45 lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
46 1 changesets found
46 1 changesets found
47 uncompressed size of bundle content:
47 uncompressed size of bundle content:
48 * (changelog) (glob)
48 * (changelog) (glob)
49 * (manifests) (glob)
49 * (manifests) (glob)
50 * a (glob)
50 * a (glob)
51 adding changesets
51 adding changesets
52 adding manifests
52 adding manifests
53 adding file changes
53 adding file changes
54 added 1 changesets with 1 changes to 1 files
54 added 1 changesets with 1 changes to 1 files
55
55
56 Clear the cache to force a download
57 $ rm -rf `hg config lfs.usercache`
56 $ cd ../repo2
58 $ cd ../repo2
57 $ hg update tip -v
59 $ hg update tip -v
58 resolving manifests
60 resolving manifests
59 getting a
61 getting a
60 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
62 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62
64
63 When the server has some blobs already
65 When the server has some blobs already
64
66
65 $ hg mv a b
67 $ hg mv a b
66 $ echo ANOTHER-LARGE-FILE > c
68 $ echo ANOTHER-LARGE-FILE > c
67 $ echo ANOTHER-LARGE-FILE2 > d
69 $ echo ANOTHER-LARGE-FILE2 > d
68 $ hg commit -m b-and-c -A b c d
70 $ hg commit -m b-and-c -A b c d
69 $ hg push ../repo1 -v | grep -v '^ '
71 $ hg push ../repo1 -v | grep -v '^ '
70 pushing to ../repo1
72 pushing to ../repo1
71 searching for changes
73 searching for changes
72 lfs: need to transfer 2 objects (39 bytes)
74 lfs: need to transfer 2 objects (39 bytes)
73 lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
75 lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
74 lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
76 lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
75 1 changesets found
77 1 changesets found
76 uncompressed size of bundle content:
78 uncompressed size of bundle content:
77 adding changesets
79 adding changesets
78 adding manifests
80 adding manifests
79 adding file changes
81 adding file changes
80 added 1 changesets with 3 changes to 3 files
82 added 1 changesets with 3 changes to 3 files
81
83
84 Clear the cache to force a download
85 $ rm -rf `hg config lfs.usercache`
82 $ hg --repo ../repo1 update tip -v
86 $ hg --repo ../repo1 update tip -v
83 resolving manifests
87 resolving manifests
84 getting b
88 getting b
85 getting c
89 getting c
86 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
90 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
87 getting d
91 getting d
88 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
92 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
89 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
93 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
90
94
91 Check error message when the remote missed a blob:
95 Check error message when the remote missed a blob:
92
96
93 $ echo FFFFF > b
97 $ echo FFFFF > b
94 $ hg commit -m b -A b
98 $ hg commit -m b -A b
95 $ echo FFFFF >> b
99 $ echo FFFFF >> b
96 $ hg commit -m b b
100 $ hg commit -m b b
97 $ rm -rf .hg/store/lfs
101 $ rm -rf .hg/store/lfs
102 $ rm -rf `hg config lfs.usercache`
98 $ hg update -C '.^'
103 $ hg update -C '.^'
99 abort: LFS server claims required objects do not exist:
104 abort: LFS server claims required objects do not exist:
100 8e6ea5f6c066b44a0efa43bcce86aea73f17e6e23f0663df0251e7524e140a13!
105 8e6ea5f6c066b44a0efa43bcce86aea73f17e6e23f0663df0251e7524e140a13!
101 [255]
106 [255]
102
107
103 Check error message when object does not exist:
108 Check error message when object does not exist:
104
109
105 $ hg init test && cd test
110 $ hg init test && cd test
106 $ echo "[extensions]" >> .hg/hgrc
111 $ echo "[extensions]" >> .hg/hgrc
107 $ echo "lfs=" >> .hg/hgrc
112 $ echo "lfs=" >> .hg/hgrc
108 $ echo "[lfs]" >> .hg/hgrc
113 $ echo "[lfs]" >> .hg/hgrc
109 $ echo "threshold=1" >> .hg/hgrc
114 $ echo "threshold=1" >> .hg/hgrc
110 $ echo a > a
115 $ echo a > a
111 $ hg add a
116 $ hg add a
112 $ hg commit -m 'test'
117 $ hg commit -m 'test'
113 $ echo aaaaa > a
118 $ echo aaaaa > a
114 $ hg commit -m 'largefile'
119 $ hg commit -m 'largefile'
115 $ hg debugdata .hg/store/data/a.i 1 # verify this is no the file content but includes "oid", the LFS "pointer".
120 $ hg debugdata .hg/store/data/a.i 1 # verify this is no the file content but includes "oid", the LFS "pointer".
116 version https://git-lfs.github.com/spec/v1
121 version https://git-lfs.github.com/spec/v1
117 oid sha256:bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a
122 oid sha256:bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a
118 size 6
123 size 6
119 x-is-binary 0
124 x-is-binary 0
120 $ cd ..
125 $ cd ..
126 $ rm -rf `hg config lfs.usercache`
121 $ hg --config 'lfs.url=https://dewey-lfs.vip.facebook.com/lfs' clone test test2
127 $ hg --config 'lfs.url=https://dewey-lfs.vip.facebook.com/lfs' clone test test2
122 updating to branch default
128 updating to branch default
123 abort: LFS server error. Remote object for file data/a.i not found:(.*)! (re)
129 abort: LFS server error. Remote object for file data/a.i not found:(.*)! (re)
124 [255]
130 [255]
125
131
126 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
132 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
@@ -1,678 +1,682 b''
1 # Initial setup
1 # Initial setup
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [extensions]
4 > [extensions]
5 > lfs=
5 > lfs=
6 > [lfs]
6 > [lfs]
7 > threshold=1000B
7 > threshold=1000B
8 > EOF
8 > EOF
9
9
10 $ LONG=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
10 $ LONG=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
11
11
12 # Prepare server and enable extension
12 # Prepare server and enable extension
13 $ hg init server
13 $ hg init server
14 $ hg clone -q server client
14 $ hg clone -q server client
15 $ cd client
15 $ cd client
16
16
17 # Commit small file
17 # Commit small file
18 $ echo s > smallfile
18 $ echo s > smallfile
19 $ hg commit -Aqm "add small file"
19 $ hg commit -Aqm "add small file"
20
20
21 # Commit large file
21 # Commit large file
22 $ echo $LONG > largefile
22 $ echo $LONG > largefile
23 $ grep lfs .hg/requires
23 $ grep lfs .hg/requires
24 [1]
24 [1]
25 $ hg commit --traceback -Aqm "add large file"
25 $ hg commit --traceback -Aqm "add large file"
26 $ grep lfs .hg/requires
26 $ grep lfs .hg/requires
27 lfs
27 lfs
28
28
29 # Ensure metadata is stored
29 # Ensure metadata is stored
30 $ hg debugdata largefile 0
30 $ hg debugdata largefile 0
31 version https://git-lfs.github.com/spec/v1
31 version https://git-lfs.github.com/spec/v1
32 oid sha256:f11e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
32 oid sha256:f11e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
33 size 1501
33 size 1501
34 x-is-binary 0
34 x-is-binary 0
35
35
36 # Check the blobstore is populated
36 # Check the blobstore is populated
37 $ find .hg/store/lfs/objects | sort
37 $ find .hg/store/lfs/objects | sort
38 .hg/store/lfs/objects
38 .hg/store/lfs/objects
39 .hg/store/lfs/objects/f1
39 .hg/store/lfs/objects/f1
40 .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
40 .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
41
41
42 # Check the blob stored contains the actual contents of the file
42 # Check the blob stored contains the actual contents of the file
43 $ cat .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
43 $ cat .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
44 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
44 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
45
45
46 # Push changes to the server
46 # Push changes to the server
47
47
48 $ hg push
48 $ hg push
49 pushing to $TESTTMP/server (glob)
49 pushing to $TESTTMP/server (glob)
50 searching for changes
50 searching for changes
51 abort: lfs.url needs to be configured
51 abort: lfs.url needs to be configured
52 [255]
52 [255]
53
53
54 $ cat >> $HGRCPATH << EOF
54 $ cat >> $HGRCPATH << EOF
55 > [lfs]
55 > [lfs]
56 > url=file:$TESTTMP/dummy-remote/
56 > url=file:$TESTTMP/dummy-remote/
57 > EOF
57 > EOF
58
58
59 $ hg push -v | egrep -v '^(uncompressed| )'
59 $ hg push -v | egrep -v '^(uncompressed| )'
60 pushing to $TESTTMP/server (glob)
60 pushing to $TESTTMP/server (glob)
61 searching for changes
61 searching for changes
62 2 changesets found
62 2 changesets found
63 adding changesets
63 adding changesets
64 adding manifests
64 adding manifests
65 adding file changes
65 adding file changes
66 added 2 changesets with 2 changes to 2 files
66 added 2 changesets with 2 changes to 2 files
67
67
68 # Unknown URL scheme
68 # Unknown URL scheme
69
69
70 $ hg push --config lfs.url=ftp://foobar
70 $ hg push --config lfs.url=ftp://foobar
71 abort: lfs: unknown url scheme: ftp
71 abort: lfs: unknown url scheme: ftp
72 [255]
72 [255]
73
73
74 $ cd ../
74 $ cd ../
75
75
76 # Initialize new client (not cloning) and setup extension
76 # Initialize new client (not cloning) and setup extension
77 $ hg init client2
77 $ hg init client2
78 $ cd client2
78 $ cd client2
79 $ cat >> .hg/hgrc <<EOF
79 $ cat >> .hg/hgrc <<EOF
80 > [paths]
80 > [paths]
81 > default = $TESTTMP/server
81 > default = $TESTTMP/server
82 > EOF
82 > EOF
83
83
84 # Pull from server
84 # Pull from server
85 $ hg pull default
85 $ hg pull default
86 pulling from $TESTTMP/server (glob)
86 pulling from $TESTTMP/server (glob)
87 requesting all changes
87 requesting all changes
88 adding changesets
88 adding changesets
89 adding manifests
89 adding manifests
90 adding file changes
90 adding file changes
91 added 2 changesets with 2 changes to 2 files
91 added 2 changesets with 2 changes to 2 files
92 new changesets b29ba743f89d:00c137947d30
92 new changesets b29ba743f89d:00c137947d30
93 (run 'hg update' to get a working copy)
93 (run 'hg update' to get a working copy)
94
94
95 # Check the blobstore is not yet populated
95 # Check the blobstore is not yet populated
96 $ [ -d .hg/store/lfs/objects ]
96 $ [ -d .hg/store/lfs/objects ]
97 [1]
97 [1]
98
98
99 # Update to the last revision containing the large file
99 # Update to the last revision containing the large file
100 $ hg update
100 $ hg update
101 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
102
102
103 # Check the blobstore has been populated on update
103 # Check the blobstore has been populated on update
104 $ find .hg/store/lfs/objects | sort
104 $ find .hg/store/lfs/objects | sort
105 .hg/store/lfs/objects
105 .hg/store/lfs/objects
106 .hg/store/lfs/objects/f1
106 .hg/store/lfs/objects/f1
107 .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
107 .hg/store/lfs/objects/f1/1e77c257047a398492d8d6cb9f6acf3aa7c4384bb23080b43546053e183e4b
108
108
109 # Check the contents of the file are fetched from blobstore when requested
109 # Check the contents of the file are fetched from blobstore when requested
110 $ hg cat -r . largefile
110 $ hg cat -r . largefile
111 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
111 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
112
112
113 # Check the file has been copied in the working copy
113 # Check the file has been copied in the working copy
114 $ cat largefile
114 $ cat largefile
115 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
115 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
116
116
117 $ cd ..
117 $ cd ..
118
118
119 # Check rename, and switch between large and small files
119 # Check rename, and switch between large and small files
120
120
121 $ hg init repo3
121 $ hg init repo3
122 $ cd repo3
122 $ cd repo3
123 $ cat >> .hg/hgrc << EOF
123 $ cat >> .hg/hgrc << EOF
124 > [lfs]
124 > [lfs]
125 > threshold=10B
125 > threshold=10B
126 > EOF
126 > EOF
127
127
128 $ echo LONGER-THAN-TEN-BYTES-WILL-TRIGGER-LFS > large
128 $ echo LONGER-THAN-TEN-BYTES-WILL-TRIGGER-LFS > large
129 $ echo SHORTER > small
129 $ echo SHORTER > small
130 $ hg add . -q
130 $ hg add . -q
131 $ hg commit -m 'commit with lfs content'
131 $ hg commit -m 'commit with lfs content'
132
132
133 $ hg mv large l
133 $ hg mv large l
134 $ hg mv small s
134 $ hg mv small s
135 $ hg commit -m 'renames'
135 $ hg commit -m 'renames'
136
136
137 $ echo SHORT > l
137 $ echo SHORT > l
138 $ echo BECOME-LARGER-FROM-SHORTER > s
138 $ echo BECOME-LARGER-FROM-SHORTER > s
139 $ hg commit -m 'large to small, small to large'
139 $ hg commit -m 'large to small, small to large'
140
140
141 $ echo 1 >> l
141 $ echo 1 >> l
142 $ echo 2 >> s
142 $ echo 2 >> s
143 $ hg commit -m 'random modifications'
143 $ hg commit -m 'random modifications'
144
144
145 $ echo RESTORE-TO-BE-LARGE > l
145 $ echo RESTORE-TO-BE-LARGE > l
146 $ echo SHORTER > s
146 $ echo SHORTER > s
147 $ hg commit -m 'switch large and small again'
147 $ hg commit -m 'switch large and small again'
148
148
149 # Test lfs_files template
149 # Test lfs_files template
150
150
151 $ hg log -r 'all()' -T '{rev} {join(lfs_files, ", ")}\n'
151 $ hg log -r 'all()' -T '{rev} {join(lfs_files, ", ")}\n'
152 0 large
152 0 large
153 1 l
153 1 l
154 2 s
154 2 s
155 3 s
155 3 s
156 4 l
156 4 l
157
157
158 # Push and pull the above repo
158 # Push and pull the above repo
159
159
160 $ hg --cwd .. init repo4
160 $ hg --cwd .. init repo4
161 $ hg push ../repo4
161 $ hg push ../repo4
162 pushing to ../repo4
162 pushing to ../repo4
163 searching for changes
163 searching for changes
164 adding changesets
164 adding changesets
165 adding manifests
165 adding manifests
166 adding file changes
166 adding file changes
167 added 5 changesets with 10 changes to 4 files
167 added 5 changesets with 10 changes to 4 files
168
168
169 $ hg --cwd .. init repo5
169 $ hg --cwd .. init repo5
170 $ hg --cwd ../repo5 pull ../repo3
170 $ hg --cwd ../repo5 pull ../repo3
171 pulling from ../repo3
171 pulling from ../repo3
172 requesting all changes
172 requesting all changes
173 adding changesets
173 adding changesets
174 adding manifests
174 adding manifests
175 adding file changes
175 adding file changes
176 added 5 changesets with 10 changes to 4 files
176 added 5 changesets with 10 changes to 4 files
177 new changesets fd47a419c4f7:5adf850972b9
177 new changesets fd47a419c4f7:5adf850972b9
178 (run 'hg update' to get a working copy)
178 (run 'hg update' to get a working copy)
179
179
180 $ cd ..
180 $ cd ..
181
181
182 # Test clone
182 # Test clone
183
183
184 $ hg init repo6
184 $ hg init repo6
185 $ cd repo6
185 $ cd repo6
186 $ cat >> .hg/hgrc << EOF
186 $ cat >> .hg/hgrc << EOF
187 > [lfs]
187 > [lfs]
188 > threshold=30B
188 > threshold=30B
189 > EOF
189 > EOF
190
190
191 $ echo LARGE-BECAUSE-IT-IS-MORE-THAN-30-BYTES > large
191 $ echo LARGE-BECAUSE-IT-IS-MORE-THAN-30-BYTES > large
192 $ echo SMALL > small
192 $ echo SMALL > small
193 $ hg commit -Aqm 'create a lfs file' large small
193 $ hg commit -Aqm 'create a lfs file' large small
194 $ hg debuglfsupload -r 'all()' -v
194 $ hg debuglfsupload -r 'all()' -v
195
195
196 $ cd ..
196 $ cd ..
197
197
198 $ hg clone repo6 repo7
198 $ hg clone repo6 repo7
199 updating to branch default
199 updating to branch default
200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
201 $ cd repo7
201 $ cd repo7
202 $ hg config extensions --debug | grep lfs
202 $ hg config extensions --debug | grep lfs
203 $TESTTMP/repo7/.hg/hgrc:*: extensions.lfs= (glob)
203 $TESTTMP/repo7/.hg/hgrc:*: extensions.lfs= (glob)
204 $ cat large
204 $ cat large
205 LARGE-BECAUSE-IT-IS-MORE-THAN-30-BYTES
205 LARGE-BECAUSE-IT-IS-MORE-THAN-30-BYTES
206 $ cat small
206 $ cat small
207 SMALL
207 SMALL
208
208
209 $ cd ..
209 $ cd ..
210
210
211 $ hg --config extensions.share= share repo7 sharedrepo
211 $ hg --config extensions.share= share repo7 sharedrepo
212 updating working directory
212 updating working directory
213 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
213 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
214 $ hg -R sharedrepo config extensions --debug | grep lfs
214 $ hg -R sharedrepo config extensions --debug | grep lfs
215 $TESTTMP/sharedrepo/.hg/hgrc:*: extensions.lfs= (glob)
215 $TESTTMP/sharedrepo/.hg/hgrc:*: extensions.lfs= (glob)
216
216
217 # Test rename and status
217 # Test rename and status
218
218
219 $ hg init repo8
219 $ hg init repo8
220 $ cd repo8
220 $ cd repo8
221 $ cat >> .hg/hgrc << EOF
221 $ cat >> .hg/hgrc << EOF
222 > [lfs]
222 > [lfs]
223 > threshold=10B
223 > threshold=10B
224 > EOF
224 > EOF
225
225
226 $ echo THIS-IS-LFS-BECAUSE-10-BYTES > a1
226 $ echo THIS-IS-LFS-BECAUSE-10-BYTES > a1
227 $ echo SMALL > a2
227 $ echo SMALL > a2
228 $ hg commit -m a -A a1 a2
228 $ hg commit -m a -A a1 a2
229 $ hg status
229 $ hg status
230 $ hg mv a1 b1
230 $ hg mv a1 b1
231 $ hg mv a2 a1
231 $ hg mv a2 a1
232 $ hg mv b1 a2
232 $ hg mv b1 a2
233 $ hg commit -m b
233 $ hg commit -m b
234 $ hg status
234 $ hg status
235 $ HEADER=$'\1\n'
235 $ HEADER=$'\1\n'
236 $ printf '%sSTART-WITH-HG-FILELOG-METADATA' "$HEADER" > a2
236 $ printf '%sSTART-WITH-HG-FILELOG-METADATA' "$HEADER" > a2
237 $ printf '%sMETA\n' "$HEADER" > a1
237 $ printf '%sMETA\n' "$HEADER" > a1
238 $ hg commit -m meta
238 $ hg commit -m meta
239 $ hg status
239 $ hg status
240 $ hg log -T '{rev}: {file_copies} | {file_dels} | {file_adds}\n'
240 $ hg log -T '{rev}: {file_copies} | {file_dels} | {file_adds}\n'
241 2: | |
241 2: | |
242 1: a1 (a2)a2 (a1) | |
242 1: a1 (a2)a2 (a1) | |
243 0: | | a1 a2
243 0: | | a1 a2
244
244
245 $ for n in a1 a2; do
245 $ for n in a1 a2; do
246 > for r in 0 1 2; do
246 > for r in 0 1 2; do
247 > printf '\n%s @ %s\n' $n $r
247 > printf '\n%s @ %s\n' $n $r
248 > hg debugdata $n $r
248 > hg debugdata $n $r
249 > done
249 > done
250 > done
250 > done
251
251
252 a1 @ 0
252 a1 @ 0
253 version https://git-lfs.github.com/spec/v1
253 version https://git-lfs.github.com/spec/v1
254 oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
254 oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
255 size 29
255 size 29
256 x-is-binary 0
256 x-is-binary 0
257
257
258 a1 @ 1
258 a1 @ 1
259 \x01 (esc)
259 \x01 (esc)
260 copy: a2
260 copy: a2
261 copyrev: 50470ad23cf937b1f4b9f80bfe54df38e65b50d9
261 copyrev: 50470ad23cf937b1f4b9f80bfe54df38e65b50d9
262 \x01 (esc)
262 \x01 (esc)
263 SMALL
263 SMALL
264
264
265 a1 @ 2
265 a1 @ 2
266 \x01 (esc)
266 \x01 (esc)
267 \x01 (esc)
267 \x01 (esc)
268 \x01 (esc)
268 \x01 (esc)
269 META
269 META
270
270
271 a2 @ 0
271 a2 @ 0
272 SMALL
272 SMALL
273
273
274 a2 @ 1
274 a2 @ 1
275 version https://git-lfs.github.com/spec/v1
275 version https://git-lfs.github.com/spec/v1
276 oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
276 oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
277 size 29
277 size 29
278 x-hg-copy a1
278 x-hg-copy a1
279 x-hg-copyrev be23af27908a582af43e5cda209a5a9b319de8d4
279 x-hg-copyrev be23af27908a582af43e5cda209a5a9b319de8d4
280 x-is-binary 0
280 x-is-binary 0
281
281
282 a2 @ 2
282 a2 @ 2
283 version https://git-lfs.github.com/spec/v1
283 version https://git-lfs.github.com/spec/v1
284 oid sha256:876dadc86a8542f9798048f2c47f51dbf8e4359aed883e8ec80c5db825f0d943
284 oid sha256:876dadc86a8542f9798048f2c47f51dbf8e4359aed883e8ec80c5db825f0d943
285 size 32
285 size 32
286 x-is-binary 0
286 x-is-binary 0
287
287
288 # Verify commit hashes include rename metadata
288 # Verify commit hashes include rename metadata
289
289
290 $ hg log -T '{rev}:{node|short} {desc}\n'
290 $ hg log -T '{rev}:{node|short} {desc}\n'
291 2:0fae949de7fa meta
291 2:0fae949de7fa meta
292 1:9cd6bdffdac0 b
292 1:9cd6bdffdac0 b
293 0:7f96794915f7 a
293 0:7f96794915f7 a
294
294
295 $ cd ..
295 $ cd ..
296
296
297 # Test bundle
297 # Test bundle
298
298
299 $ hg init repo9
299 $ hg init repo9
300 $ cd repo9
300 $ cd repo9
301 $ cat >> .hg/hgrc << EOF
301 $ cat >> .hg/hgrc << EOF
302 > [lfs]
302 > [lfs]
303 > threshold=10B
303 > threshold=10B
304 > [diff]
304 > [diff]
305 > git=1
305 > git=1
306 > EOF
306 > EOF
307
307
308 $ for i in 0 single two three 4; do
308 $ for i in 0 single two three 4; do
309 > echo 'THIS-IS-LFS-'$i > a
309 > echo 'THIS-IS-LFS-'$i > a
310 > hg commit -m a-$i -A a
310 > hg commit -m a-$i -A a
311 > done
311 > done
312
312
313 $ hg update 2 -q
313 $ hg update 2 -q
314 $ echo 'THIS-IS-LFS-2-CHILD' > a
314 $ echo 'THIS-IS-LFS-2-CHILD' > a
315 $ hg commit -m branching -q
315 $ hg commit -m branching -q
316
316
317 $ hg bundle --base 1 bundle.hg -v
317 $ hg bundle --base 1 bundle.hg -v
318 4 changesets found
318 4 changesets found
319 uncompressed size of bundle content:
319 uncompressed size of bundle content:
320 * (changelog) (glob)
320 * (changelog) (glob)
321 * (manifests) (glob)
321 * (manifests) (glob)
322 * a (glob)
322 * a (glob)
323 $ hg --config extensions.strip= strip -r 2 --no-backup --force -q
323 $ hg --config extensions.strip= strip -r 2 --no-backup --force -q
324 $ hg -R bundle.hg log -p -T '{rev} {desc}\n' a
324 $ hg -R bundle.hg log -p -T '{rev} {desc}\n' a
325 5 branching
325 5 branching
326 diff --git a/a b/a
326 diff --git a/a b/a
327 --- a/a
327 --- a/a
328 +++ b/a
328 +++ b/a
329 @@ -1,1 +1,1 @@
329 @@ -1,1 +1,1 @@
330 -THIS-IS-LFS-two
330 -THIS-IS-LFS-two
331 +THIS-IS-LFS-2-CHILD
331 +THIS-IS-LFS-2-CHILD
332
332
333 4 a-4
333 4 a-4
334 diff --git a/a b/a
334 diff --git a/a b/a
335 --- a/a
335 --- a/a
336 +++ b/a
336 +++ b/a
337 @@ -1,1 +1,1 @@
337 @@ -1,1 +1,1 @@
338 -THIS-IS-LFS-three
338 -THIS-IS-LFS-three
339 +THIS-IS-LFS-4
339 +THIS-IS-LFS-4
340
340
341 3 a-three
341 3 a-three
342 diff --git a/a b/a
342 diff --git a/a b/a
343 --- a/a
343 --- a/a
344 +++ b/a
344 +++ b/a
345 @@ -1,1 +1,1 @@
345 @@ -1,1 +1,1 @@
346 -THIS-IS-LFS-two
346 -THIS-IS-LFS-two
347 +THIS-IS-LFS-three
347 +THIS-IS-LFS-three
348
348
349 2 a-two
349 2 a-two
350 diff --git a/a b/a
350 diff --git a/a b/a
351 --- a/a
351 --- a/a
352 +++ b/a
352 +++ b/a
353 @@ -1,1 +1,1 @@
353 @@ -1,1 +1,1 @@
354 -THIS-IS-LFS-single
354 -THIS-IS-LFS-single
355 +THIS-IS-LFS-two
355 +THIS-IS-LFS-two
356
356
357 1 a-single
357 1 a-single
358 diff --git a/a b/a
358 diff --git a/a b/a
359 --- a/a
359 --- a/a
360 +++ b/a
360 +++ b/a
361 @@ -1,1 +1,1 @@
361 @@ -1,1 +1,1 @@
362 -THIS-IS-LFS-0
362 -THIS-IS-LFS-0
363 +THIS-IS-LFS-single
363 +THIS-IS-LFS-single
364
364
365 0 a-0
365 0 a-0
366 diff --git a/a b/a
366 diff --git a/a b/a
367 new file mode 100644
367 new file mode 100644
368 --- /dev/null
368 --- /dev/null
369 +++ b/a
369 +++ b/a
370 @@ -0,0 +1,1 @@
370 @@ -0,0 +1,1 @@
371 +THIS-IS-LFS-0
371 +THIS-IS-LFS-0
372
372
373 $ hg bundle -R bundle.hg --base 1 bundle-again.hg -q
373 $ hg bundle -R bundle.hg --base 1 bundle-again.hg -q
374 $ hg -R bundle-again.hg log -p -T '{rev} {desc}\n' a
374 $ hg -R bundle-again.hg log -p -T '{rev} {desc}\n' a
375 5 branching
375 5 branching
376 diff --git a/a b/a
376 diff --git a/a b/a
377 --- a/a
377 --- a/a
378 +++ b/a
378 +++ b/a
379 @@ -1,1 +1,1 @@
379 @@ -1,1 +1,1 @@
380 -THIS-IS-LFS-two
380 -THIS-IS-LFS-two
381 +THIS-IS-LFS-2-CHILD
381 +THIS-IS-LFS-2-CHILD
382
382
383 4 a-4
383 4 a-4
384 diff --git a/a b/a
384 diff --git a/a b/a
385 --- a/a
385 --- a/a
386 +++ b/a
386 +++ b/a
387 @@ -1,1 +1,1 @@
387 @@ -1,1 +1,1 @@
388 -THIS-IS-LFS-three
388 -THIS-IS-LFS-three
389 +THIS-IS-LFS-4
389 +THIS-IS-LFS-4
390
390
391 3 a-three
391 3 a-three
392 diff --git a/a b/a
392 diff --git a/a b/a
393 --- a/a
393 --- a/a
394 +++ b/a
394 +++ b/a
395 @@ -1,1 +1,1 @@
395 @@ -1,1 +1,1 @@
396 -THIS-IS-LFS-two
396 -THIS-IS-LFS-two
397 +THIS-IS-LFS-three
397 +THIS-IS-LFS-three
398
398
399 2 a-two
399 2 a-two
400 diff --git a/a b/a
400 diff --git a/a b/a
401 --- a/a
401 --- a/a
402 +++ b/a
402 +++ b/a
403 @@ -1,1 +1,1 @@
403 @@ -1,1 +1,1 @@
404 -THIS-IS-LFS-single
404 -THIS-IS-LFS-single
405 +THIS-IS-LFS-two
405 +THIS-IS-LFS-two
406
406
407 1 a-single
407 1 a-single
408 diff --git a/a b/a
408 diff --git a/a b/a
409 --- a/a
409 --- a/a
410 +++ b/a
410 +++ b/a
411 @@ -1,1 +1,1 @@
411 @@ -1,1 +1,1 @@
412 -THIS-IS-LFS-0
412 -THIS-IS-LFS-0
413 +THIS-IS-LFS-single
413 +THIS-IS-LFS-single
414
414
415 0 a-0
415 0 a-0
416 diff --git a/a b/a
416 diff --git a/a b/a
417 new file mode 100644
417 new file mode 100644
418 --- /dev/null
418 --- /dev/null
419 +++ b/a
419 +++ b/a
420 @@ -0,0 +1,1 @@
420 @@ -0,0 +1,1 @@
421 +THIS-IS-LFS-0
421 +THIS-IS-LFS-0
422
422
423 $ cd ..
423 $ cd ..
424
424
425 # Test isbinary
425 # Test isbinary
426
426
427 $ hg init repo10
427 $ hg init repo10
428 $ cd repo10
428 $ cd repo10
429 $ cat >> .hg/hgrc << EOF
429 $ cat >> .hg/hgrc << EOF
430 > [extensions]
430 > [extensions]
431 > lfs=
431 > lfs=
432 > [lfs]
432 > [lfs]
433 > threshold=1
433 > threshold=1
434 > EOF
434 > EOF
435 $ $PYTHON <<'EOF'
435 $ $PYTHON <<'EOF'
436 > def write(path, content):
436 > def write(path, content):
437 > with open(path, 'wb') as f:
437 > with open(path, 'wb') as f:
438 > f.write(content)
438 > f.write(content)
439 > write('a', b'\0\0')
439 > write('a', b'\0\0')
440 > write('b', b'\1\n')
440 > write('b', b'\1\n')
441 > write('c', b'\1\n\0')
441 > write('c', b'\1\n\0')
442 > write('d', b'xx')
442 > write('d', b'xx')
443 > EOF
443 > EOF
444 $ hg add a b c d
444 $ hg add a b c d
445 $ hg diff --stat
445 $ hg diff --stat
446 a | Bin
446 a | Bin
447 b | 1 +
447 b | 1 +
448 c | Bin
448 c | Bin
449 d | 1 +
449 d | 1 +
450 4 files changed, 2 insertions(+), 0 deletions(-)
450 4 files changed, 2 insertions(+), 0 deletions(-)
451 $ hg commit -m binarytest
451 $ hg commit -m binarytest
452 $ cat > $TESTTMP/dumpbinary.py << EOF
452 $ cat > $TESTTMP/dumpbinary.py << EOF
453 > def reposetup(ui, repo):
453 > def reposetup(ui, repo):
454 > for n in 'abcd':
454 > for n in 'abcd':
455 > ui.write(('%s: binary=%s\n') % (n, repo['.'][n].isbinary()))
455 > ui.write(('%s: binary=%s\n') % (n, repo['.'][n].isbinary()))
456 > EOF
456 > EOF
457 $ hg --config extensions.dumpbinary=$TESTTMP/dumpbinary.py id --trace
457 $ hg --config extensions.dumpbinary=$TESTTMP/dumpbinary.py id --trace
458 a: binary=True
458 a: binary=True
459 b: binary=False
459 b: binary=False
460 c: binary=True
460 c: binary=True
461 d: binary=False
461 d: binary=False
462 b55353847f02 tip
462 b55353847f02 tip
463
463
464 $ cd ..
464 $ cd ..
465
465
466 # Test fctx.cmp fastpath - diff without LFS blobs
466 # Test fctx.cmp fastpath - diff without LFS blobs
467
467
468 $ hg init repo11
468 $ hg init repo11
469 $ cd repo11
469 $ cd repo11
470 $ cat >> .hg/hgrc <<EOF
470 $ cat >> .hg/hgrc <<EOF
471 > [lfs]
471 > [lfs]
472 > threshold=1
472 > threshold=1
473 > EOF
473 > EOF
474 $ cat > ../patch.diff <<EOF
474 $ cat > ../patch.diff <<EOF
475 > # HG changeset patch
475 > # HG changeset patch
476 > 2
476 > 2
477 >
477 >
478 > diff --git a/a b/a
478 > diff --git a/a b/a
479 > old mode 100644
479 > old mode 100644
480 > new mode 100755
480 > new mode 100755
481 > EOF
481 > EOF
482
482
483 $ for i in 1 2 3; do
483 $ for i in 1 2 3; do
484 > cp ../repo10/a a
484 > cp ../repo10/a a
485 > if [ $i = 3 ]; then
485 > if [ $i = 3 ]; then
486 > # make a content-only change
486 > # make a content-only change
487 > hg import -q --bypass ../patch.diff
487 > hg import -q --bypass ../patch.diff
488 > hg update -q
488 > hg update -q
489 > rm ../patch.diff
489 > rm ../patch.diff
490 > else
490 > else
491 > echo $i >> a
491 > echo $i >> a
492 > hg commit -m $i -A a
492 > hg commit -m $i -A a
493 > fi
493 > fi
494 > done
494 > done
495 $ [ -d .hg/store/lfs/objects ]
495 $ [ -d .hg/store/lfs/objects ]
496
496
497 $ cd ..
497 $ cd ..
498
498
499 $ hg clone repo11 repo12 --noupdate
499 $ hg clone repo11 repo12 --noupdate
500 $ cd repo12
500 $ cd repo12
501 $ hg log --removed -p a -T '{desc}\n' --config diff.nobinary=1 --git
501 $ hg log --removed -p a -T '{desc}\n' --config diff.nobinary=1 --git
502 2
502 2
503 diff --git a/a b/a
503 diff --git a/a b/a
504 old mode 100644
504 old mode 100644
505 new mode 100755
505 new mode 100755
506
506
507 2
507 2
508 diff --git a/a b/a
508 diff --git a/a b/a
509 Binary file a has changed
509 Binary file a has changed
510
510
511 1
511 1
512 diff --git a/a b/a
512 diff --git a/a b/a
513 new file mode 100644
513 new file mode 100644
514 Binary file a has changed
514 Binary file a has changed
515
515
516 $ [ -d .hg/store/lfs/objects ]
516 $ [ -d .hg/store/lfs/objects ]
517 [1]
517 [1]
518
518
519 $ cd ..
519 $ cd ..
520
520
521 # Verify the repos
521 # Verify the repos
522
522
523 $ cat > $TESTTMP/dumpflog.py << EOF
523 $ cat > $TESTTMP/dumpflog.py << EOF
524 > # print raw revision sizes, flags, and hashes for certain files
524 > # print raw revision sizes, flags, and hashes for certain files
525 > import hashlib
525 > import hashlib
526 > from mercurial import revlog
526 > from mercurial import revlog
527 > from mercurial.node import short
527 > from mercurial.node import short
528 > def hash(rawtext):
528 > def hash(rawtext):
529 > h = hashlib.sha512()
529 > h = hashlib.sha512()
530 > h.update(rawtext)
530 > h.update(rawtext)
531 > return h.hexdigest()[:4]
531 > return h.hexdigest()[:4]
532 > def reposetup(ui, repo):
532 > def reposetup(ui, repo):
533 > # these 2 files are interesting
533 > # these 2 files are interesting
534 > for name in ['l', 's']:
534 > for name in ['l', 's']:
535 > fl = repo.file(name)
535 > fl = repo.file(name)
536 > if len(fl) == 0:
536 > if len(fl) == 0:
537 > continue
537 > continue
538 > sizes = [revlog.revlog.rawsize(fl, i) for i in fl]
538 > sizes = [revlog.revlog.rawsize(fl, i) for i in fl]
539 > texts = [fl.revision(i, raw=True) for i in fl]
539 > texts = [fl.revision(i, raw=True) for i in fl]
540 > flags = [int(fl.flags(i)) for i in fl]
540 > flags = [int(fl.flags(i)) for i in fl]
541 > hashes = [hash(t) for t in texts]
541 > hashes = [hash(t) for t in texts]
542 > print(' %s: rawsizes=%r flags=%r hashes=%r'
542 > print(' %s: rawsizes=%r flags=%r hashes=%r'
543 > % (name, sizes, flags, hashes))
543 > % (name, sizes, flags, hashes))
544 > EOF
544 > EOF
545
545
546 $ for i in client client2 server repo3 repo4 repo5 repo6 repo7 repo8 repo9 \
546 $ for i in client client2 server repo3 repo4 repo5 repo6 repo7 repo8 repo9 \
547 > repo10; do
547 > repo10; do
548 > echo 'repo:' $i
548 > echo 'repo:' $i
549 > hg --cwd $i verify --config extensions.dumpflog=$TESTTMP/dumpflog.py -q
549 > hg --cwd $i verify --config extensions.dumpflog=$TESTTMP/dumpflog.py -q
550 > done
550 > done
551 repo: client
551 repo: client
552 repo: client2
552 repo: client2
553 repo: server
553 repo: server
554 repo: repo3
554 repo: repo3
555 l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
555 l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
556 s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
556 s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
557 repo: repo4
557 repo: repo4
558 l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
558 l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
559 s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
559 s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
560 repo: repo5
560 repo: repo5
561 l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
561 l: rawsizes=[211, 6, 8, 141] flags=[8192, 0, 0, 8192] hashes=['d2b8', '948c', 'cc88', '724d']
562 s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
562 s: rawsizes=[74, 141, 141, 8] flags=[0, 8192, 8192, 0] hashes=['3c80', 'fce0', '874a', '826b']
563 repo: repo6
563 repo: repo6
564 repo: repo7
564 repo: repo7
565 repo: repo8
565 repo: repo8
566 repo: repo9
566 repo: repo9
567 repo: repo10
567 repo: repo10
568
568
569 TODO: repo12 doesn't have any cached lfs files. Figure out how to get the
569 repo12 doesn't have any cached lfs files and its source never pushed its
570 unpushed files from repo12's source instead of the remote store, where they
570 files. Therefore, the files don't exist in the remote store. Use the files in
571 don't exist.
571 the user cache.
572
572
573 $ find $TESTTMP/repo12/.hg/store/lfs/objects -type f
573 $ find $TESTTMP/repo12/.hg/store/lfs/objects -type f
574 find: */repo12/.hg/store/lfs/objects': $ENOENT$ (glob)
574 find: */repo12/.hg/store/lfs/objects': $ENOENT$ (glob)
575 [1]
575 [1]
576
576
577 $ hg --config extensions.share= share repo12 repo13
577 $ hg --config extensions.share= share repo12 repo13
578 updating working directory
578 updating working directory
579 abort: $TESTTMP/dummy-remote/09/66faba9a01f6c78082aa45899a4fef732002d0b26404e90093adf1e876ab8d: $ENOTDIR$ (glob)
579 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
580 [255]
580 $ hg -R repo13 -q verify
581
581 $ hg clone repo12 repo14
582 $ hg clone repo12 repo14
582 updating to branch default
583 updating to branch default
583 abort: $TESTTMP/dummy-remote/09/66faba9a01f6c78082aa45899a4fef732002d0b26404e90093adf1e876ab8d: $ENOTDIR$ (glob)
584 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
584 [255]
585 $ hg -R repo14 -q verify
585
586
586 TODO: If the source repo doesn't have the blob (maybe it was pulled or cloned
587 If the source repo doesn't have the blob (maybe it was pulled or cloned with
587 with --noupdate), the blob should be accessible via the global cache to send to
588 --noupdate), the blob is still accessible via the global cache to send to the
588 the remote store.
589 remote store.
589
590
590 $ rm -rf $TESTTMP/repo14/.hg/store/lfs
591 $ rm -rf $TESTTMP/repo14/.hg/store/lfs
591 $ hg init repo15
592 $ hg init repo15
592 $ hg -R repo14 push repo15
593 $ hg -R repo14 push repo15
593 pushing to repo15
594 pushing to repo15
594 searching for changes
595 searching for changes
595 abort: $TESTTMP/repo14/.hg/store/lfs/objects/1c/896a0adcf9262119f4a98216aaa5ca00a58b9a0ce848914a02f9cd876f65a3: $ENOTDIR$ (glob)
596 adding changesets
596 [255]
597 adding manifests
598 adding file changes
599 added 3 changesets with 2 changes to 1 files
600 $ hg -R repo14 -q verify
597
601
598 lfs -> normal -> lfs round trip conversions are possible. The threshold for the
602 lfs -> normal -> lfs round trip conversions are possible. The threshold for the
599 lfs destination is specified here because it was originally listed in the local
603 lfs destination is specified here because it was originally listed in the local
600 .hgrc, and the global one is too high to trigger lfs usage. For lfs -> normal,
604 .hgrc, and the global one is too high to trigger lfs usage. For lfs -> normal,
601 there's no 'lfs' destination repo requirement. For normal -> lfs, there is.
605 there's no 'lfs' destination repo requirement. For normal -> lfs, there is.
602
606
603 XXX: There's not a great way to ensure that the conversion to normal files
607 XXX: There's not a great way to ensure that the conversion to normal files
604 actually converts _everything_ to normal. The extension needs to be loaded for
608 actually converts _everything_ to normal. The extension needs to be loaded for
605 the source, but there's no way to disable it for the destination. The best that
609 the source, but there's no way to disable it for the destination. The best that
606 can be done is to raise the threshold so that lfs isn't used on the destination.
610 can be done is to raise the threshold so that lfs isn't used on the destination.
607 It doesn't like using '!' to unset the value on the command line.
611 It doesn't like using '!' to unset the value on the command line.
608
612
609 $ hg --config extensions.convert= --config lfs.threshold=1000M \
613 $ hg --config extensions.convert= --config lfs.threshold=1000M \
610 > convert repo8 convert_normal
614 > convert repo8 convert_normal
611 initializing destination convert_normal repository
615 initializing destination convert_normal repository
612 scanning source...
616 scanning source...
613 sorting...
617 sorting...
614 converting...
618 converting...
615 2 a
619 2 a
616 1 b
620 1 b
617 0 meta
621 0 meta
618 $ grep 'lfs' convert_normal/.hg/requires
622 $ grep 'lfs' convert_normal/.hg/requires
619 [1]
623 [1]
620 $ hg --cwd convert_normal debugdata a1 0
624 $ hg --cwd convert_normal debugdata a1 0
621 THIS-IS-LFS-BECAUSE-10-BYTES
625 THIS-IS-LFS-BECAUSE-10-BYTES
622
626
623 $ hg --config extensions.convert= --config lfs.threshold=10B \
627 $ hg --config extensions.convert= --config lfs.threshold=10B \
624 > convert convert_normal convert_lfs
628 > convert convert_normal convert_lfs
625 initializing destination convert_lfs repository
629 initializing destination convert_lfs repository
626 scanning source...
630 scanning source...
627 sorting...
631 sorting...
628 converting...
632 converting...
629 2 a
633 2 a
630 1 b
634 1 b
631 0 meta
635 0 meta
632 $ hg --cwd convert_lfs debugdata a1 0
636 $ hg --cwd convert_lfs debugdata a1 0
633 version https://git-lfs.github.com/spec/v1
637 version https://git-lfs.github.com/spec/v1
634 oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
638 oid sha256:5bb8341bee63b3649f222b2215bde37322bea075a30575aa685d8f8d21c77024
635 size 29
639 size 29
636 x-is-binary 0
640 x-is-binary 0
637 $ grep 'lfs' convert_lfs/.hg/requires
641 $ grep 'lfs' convert_lfs/.hg/requires
638 lfs
642 lfs
639
643
640 This convert is trickier, because it contains deleted files (via `hg mv`)
644 This convert is trickier, because it contains deleted files (via `hg mv`)
641
645
642 $ hg --config extensions.convert= --config lfs.threshold=1000M \
646 $ hg --config extensions.convert= --config lfs.threshold=1000M \
643 > convert repo3 convert_normal2
647 > convert repo3 convert_normal2
644 initializing destination convert_normal2 repository
648 initializing destination convert_normal2 repository
645 scanning source...
649 scanning source...
646 sorting...
650 sorting...
647 converting...
651 converting...
648 4 commit with lfs content
652 4 commit with lfs content
649 3 renames
653 3 renames
650 2 large to small, small to large
654 2 large to small, small to large
651 1 random modifications
655 1 random modifications
652 0 switch large and small again
656 0 switch large and small again
653 $ grep 'lfs' convert_normal2/.hg/requires
657 $ grep 'lfs' convert_normal2/.hg/requires
654 [1]
658 [1]
655 $ hg --cwd convert_normal2 debugdata large 0
659 $ hg --cwd convert_normal2 debugdata large 0
656 LONGER-THAN-TEN-BYTES-WILL-TRIGGER-LFS
660 LONGER-THAN-TEN-BYTES-WILL-TRIGGER-LFS
657
661
658 $ hg --config extensions.convert= --config lfs.threshold=10B \
662 $ hg --config extensions.convert= --config lfs.threshold=10B \
659 > convert convert_normal2 convert_lfs2
663 > convert convert_normal2 convert_lfs2
660 initializing destination convert_lfs2 repository
664 initializing destination convert_lfs2 repository
661 scanning source...
665 scanning source...
662 sorting...
666 sorting...
663 converting...
667 converting...
664 4 commit with lfs content
668 4 commit with lfs content
665 3 renames
669 3 renames
666 2 large to small, small to large
670 2 large to small, small to large
667 1 random modifications
671 1 random modifications
668 0 switch large and small again
672 0 switch large and small again
669 $ grep 'lfs' convert_lfs2/.hg/requires
673 $ grep 'lfs' convert_lfs2/.hg/requires
670 lfs
674 lfs
671 $ hg --cwd convert_lfs2 debugdata large 0
675 $ hg --cwd convert_lfs2 debugdata large 0
672 version https://git-lfs.github.com/spec/v1
676 version https://git-lfs.github.com/spec/v1
673 oid sha256:66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
677 oid sha256:66100b384bf761271b407d79fc30cdd0554f3b2c5d944836e936d584b88ce88e
674 size 39
678 size 39
675 x-is-binary 0
679 x-is-binary 0
676
680
677 $ hg -R convert_lfs2 config --debug extensions | grep lfs
681 $ hg -R convert_lfs2 config --debug extensions | grep lfs
678 $TESTTMP/convert_lfs2/.hg/hgrc:*: extensions.lfs= (glob)
682 $TESTTMP/convert_lfs2/.hg/hgrc:*: extensions.lfs= (glob)
General Comments 0
You need to be logged in to leave comments. Login now