archive: raise error.Abort if the file pattern matches no files...
Angel Ezquerra
r18967:88d1b59f default
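In practice, this means a caller of archival.archive() gets an explicit failure instead of silently receiving an empty archive, and the function now reports how many files it wrote. A minimal caller sketch (the helper, the 'path:docs' pattern, and the import style are illustrative assumptions, not part of this changeset):

# Hypothetical caller relying on the new behaviour: archive() returns the
# number of files written and raises error.Abort when the match function
# selects no files at all.
from mercurial import archival, error, scmutil

def export_docs(repo, rev, dest='docs.tgz'):
    ctx = repo[rev]
    m = scmutil.match(ctx, ['path:docs'])  # only files under docs/
    try:
        total = archival.archive(repo, dest, ctx.node(), 'tgz', matchfn=m)
    except error.Abort:
        # "no files match the archive pattern" -- docs/ is empty at this rev
        raise
    return total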
@@ -1,307 +1,313 @@
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex
9 from node import hex
10 import match as matchmod
10 import match as matchmod
11 import cmdutil
11 import cmdutil
12 import scmutil, util, encoding
12 import scmutil, util, encoding
13 import cStringIO, os, tarfile, time, zipfile
13 import cStringIO, os, tarfile, time, zipfile
14 import zlib, gzip
14 import zlib, gzip
15 import struct
15 import struct
16 import error
16
17
17 # from unzip source code:
18 # from unzip source code:
18 _UNX_IFREG = 0x8000
19 _UNX_IFREG = 0x8000
19 _UNX_IFLNK = 0xa000
20 _UNX_IFLNK = 0xa000
20
21
21 def tidyprefix(dest, kind, prefix):
22 def tidyprefix(dest, kind, prefix):
22 '''choose prefix to use for names in archive. make sure prefix is
23 '''choose prefix to use for names in archive. make sure prefix is
23 safe for consumers.'''
24 safe for consumers.'''
24
25
25 if prefix:
26 if prefix:
26 prefix = util.normpath(prefix)
27 prefix = util.normpath(prefix)
27 else:
28 else:
28 if not isinstance(dest, str):
29 if not isinstance(dest, str):
29 raise ValueError('dest must be string if no prefix')
30 raise ValueError('dest must be string if no prefix')
30 prefix = os.path.basename(dest)
31 prefix = os.path.basename(dest)
31 lower = prefix.lower()
32 lower = prefix.lower()
32 for sfx in exts.get(kind, []):
33 for sfx in exts.get(kind, []):
33 if lower.endswith(sfx):
34 if lower.endswith(sfx):
34 prefix = prefix[:-len(sfx)]
35 prefix = prefix[:-len(sfx)]
35 break
36 break
36 lpfx = os.path.normpath(util.localpath(prefix))
37 lpfx = os.path.normpath(util.localpath(prefix))
37 prefix = util.pconvert(lpfx)
38 prefix = util.pconvert(lpfx)
38 if not prefix.endswith('/'):
39 if not prefix.endswith('/'):
39 prefix += '/'
40 prefix += '/'
40 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
41 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
41 raise util.Abort(_('archive prefix contains illegal components'))
42 raise util.Abort(_('archive prefix contains illegal components'))
42 return prefix
43 return prefix
43
44
44 exts = {
45 exts = {
45 'tar': ['.tar'],
46 'tar': ['.tar'],
46 'tbz2': ['.tbz2', '.tar.bz2'],
47 'tbz2': ['.tbz2', '.tar.bz2'],
47 'tgz': ['.tgz', '.tar.gz'],
48 'tgz': ['.tgz', '.tar.gz'],
48 'zip': ['.zip'],
49 'zip': ['.zip'],
49 }
50 }
50
51
51 def guesskind(dest):
52 def guesskind(dest):
52 for kind, extensions in exts.iteritems():
53 for kind, extensions in exts.iteritems():
53 if util.any(dest.endswith(ext) for ext in extensions):
54 if util.any(dest.endswith(ext) for ext in extensions):
54 return kind
55 return kind
55 return None
56 return None
56
57
57
58
58 class tarit(object):
59 class tarit(object):
59 '''write archive to tar file or stream. can write uncompressed,
60 '''write archive to tar file or stream. can write uncompressed,
60 or compress with gzip or bzip2.'''
61 or compress with gzip or bzip2.'''
61
62
62 class GzipFileWithTime(gzip.GzipFile):
63 class GzipFileWithTime(gzip.GzipFile):
63
64
64 def __init__(self, *args, **kw):
65 def __init__(self, *args, **kw):
65 timestamp = None
66 timestamp = None
66 if 'timestamp' in kw:
67 if 'timestamp' in kw:
67 timestamp = kw.pop('timestamp')
68 timestamp = kw.pop('timestamp')
68 if timestamp is None:
69 if timestamp is None:
69 self.timestamp = time.time()
70 self.timestamp = time.time()
70 else:
71 else:
71 self.timestamp = timestamp
72 self.timestamp = timestamp
72 gzip.GzipFile.__init__(self, *args, **kw)
73 gzip.GzipFile.__init__(self, *args, **kw)
73
74
74 def _write_gzip_header(self):
75 def _write_gzip_header(self):
75 self.fileobj.write('\037\213') # magic header
76 self.fileobj.write('\037\213') # magic header
76 self.fileobj.write('\010') # compression method
77 self.fileobj.write('\010') # compression method
77 # Python 2.6 introduced self.name and deprecated self.filename
78 # Python 2.6 introduced self.name and deprecated self.filename
78 try:
79 try:
79 fname = self.name
80 fname = self.name
80 except AttributeError:
81 except AttributeError:
81 fname = self.filename
82 fname = self.filename
82 if fname and fname.endswith('.gz'):
83 if fname and fname.endswith('.gz'):
83 fname = fname[:-3]
84 fname = fname[:-3]
84 flags = 0
85 flags = 0
85 if fname:
86 if fname:
86 flags = gzip.FNAME
87 flags = gzip.FNAME
87 self.fileobj.write(chr(flags))
88 self.fileobj.write(chr(flags))
88 gzip.write32u(self.fileobj, long(self.timestamp))
89 gzip.write32u(self.fileobj, long(self.timestamp))
89 self.fileobj.write('\002')
90 self.fileobj.write('\002')
90 self.fileobj.write('\377')
91 self.fileobj.write('\377')
91 if fname:
92 if fname:
92 self.fileobj.write(fname + '\000')
93 self.fileobj.write(fname + '\000')
93
94
94 def __init__(self, dest, mtime, kind=''):
95 def __init__(self, dest, mtime, kind=''):
95 self.mtime = mtime
96 self.mtime = mtime
96 self.fileobj = None
97 self.fileobj = None
97
98
98 def taropen(name, mode, fileobj=None):
99 def taropen(name, mode, fileobj=None):
99 if kind == 'gz':
100 if kind == 'gz':
100 mode = mode[0]
101 mode = mode[0]
101 if not fileobj:
102 if not fileobj:
102 fileobj = open(name, mode + 'b')
103 fileobj = open(name, mode + 'b')
103 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
104 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
104 zlib.Z_BEST_COMPRESSION,
105 zlib.Z_BEST_COMPRESSION,
105 fileobj, timestamp=mtime)
106 fileobj, timestamp=mtime)
106 self.fileobj = gzfileobj
107 self.fileobj = gzfileobj
107 return tarfile.TarFile.taropen(name, mode, gzfileobj)
108 return tarfile.TarFile.taropen(name, mode, gzfileobj)
108 else:
109 else:
109 return tarfile.open(name, mode + kind, fileobj)
110 return tarfile.open(name, mode + kind, fileobj)
110
111
111 if isinstance(dest, str):
112 if isinstance(dest, str):
112 self.z = taropen(dest, mode='w:')
113 self.z = taropen(dest, mode='w:')
113 else:
114 else:
114 # Python 2.5-2.5.1 have a regression that requires a name arg
115 # Python 2.5-2.5.1 have a regression that requires a name arg
115 self.z = taropen(name='', mode='w|', fileobj=dest)
116 self.z = taropen(name='', mode='w|', fileobj=dest)
116
117
117 def addfile(self, name, mode, islink, data):
118 def addfile(self, name, mode, islink, data):
118 i = tarfile.TarInfo(name)
119 i = tarfile.TarInfo(name)
119 i.mtime = self.mtime
120 i.mtime = self.mtime
120 i.size = len(data)
121 i.size = len(data)
121 if islink:
122 if islink:
122 i.type = tarfile.SYMTYPE
123 i.type = tarfile.SYMTYPE
123 i.mode = 0777
124 i.mode = 0777
124 i.linkname = data
125 i.linkname = data
125 data = None
126 data = None
126 i.size = 0
127 i.size = 0
127 else:
128 else:
128 i.mode = mode
129 i.mode = mode
129 data = cStringIO.StringIO(data)
130 data = cStringIO.StringIO(data)
130 self.z.addfile(i, data)
131 self.z.addfile(i, data)
131
132
132 def done(self):
133 def done(self):
133 self.z.close()
134 self.z.close()
134 if self.fileobj:
135 if self.fileobj:
135 self.fileobj.close()
136 self.fileobj.close()
136
137
137 class tellable(object):
138 class tellable(object):
138 '''provide tell method for zipfile.ZipFile when writing to http
139 '''provide tell method for zipfile.ZipFile when writing to http
139 response file object.'''
140 response file object.'''
140
141
141 def __init__(self, fp):
142 def __init__(self, fp):
142 self.fp = fp
143 self.fp = fp
143 self.offset = 0
144 self.offset = 0
144
145
145 def __getattr__(self, key):
146 def __getattr__(self, key):
146 return getattr(self.fp, key)
147 return getattr(self.fp, key)
147
148
148 def write(self, s):
149 def write(self, s):
149 self.fp.write(s)
150 self.fp.write(s)
150 self.offset += len(s)
151 self.offset += len(s)
151
152
152 def tell(self):
153 def tell(self):
153 return self.offset
154 return self.offset
154
155
155 class zipit(object):
156 class zipit(object):
156 '''write archive to zip file or stream. can write uncompressed,
157 '''write archive to zip file or stream. can write uncompressed,
157 or compressed with deflate.'''
158 or compressed with deflate.'''
158
159
159 def __init__(self, dest, mtime, compress=True):
160 def __init__(self, dest, mtime, compress=True):
160 if not isinstance(dest, str):
161 if not isinstance(dest, str):
161 try:
162 try:
162 dest.tell()
163 dest.tell()
163 except (AttributeError, IOError):
164 except (AttributeError, IOError):
164 dest = tellable(dest)
165 dest = tellable(dest)
165 self.z = zipfile.ZipFile(dest, 'w',
166 self.z = zipfile.ZipFile(dest, 'w',
166 compress and zipfile.ZIP_DEFLATED or
167 compress and zipfile.ZIP_DEFLATED or
167 zipfile.ZIP_STORED)
168 zipfile.ZIP_STORED)
168
169
169 # Python's zipfile module emits deprecation warnings if we try
170 # Python's zipfile module emits deprecation warnings if we try
170 # to store files with a date before 1980.
171 # to store files with a date before 1980.
171 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
172 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
172 if mtime < epoch:
173 if mtime < epoch:
173 mtime = epoch
174 mtime = epoch
174
175
175 self.mtime = mtime
176 self.mtime = mtime
176 self.date_time = time.gmtime(mtime)[:6]
177 self.date_time = time.gmtime(mtime)[:6]
177
178
178 def addfile(self, name, mode, islink, data):
179 def addfile(self, name, mode, islink, data):
179 i = zipfile.ZipInfo(name, self.date_time)
180 i = zipfile.ZipInfo(name, self.date_time)
180 i.compress_type = self.z.compression
181 i.compress_type = self.z.compression
181 # unzip will not honor unix file modes unless file creator is
182 # unzip will not honor unix file modes unless file creator is
182 # set to unix (id 3).
183 # set to unix (id 3).
183 i.create_system = 3
184 i.create_system = 3
184 ftype = _UNX_IFREG
185 ftype = _UNX_IFREG
185 if islink:
186 if islink:
186 mode = 0777
187 mode = 0777
187 ftype = _UNX_IFLNK
188 ftype = _UNX_IFLNK
188 i.external_attr = (mode | ftype) << 16L
189 i.external_attr = (mode | ftype) << 16L
189 # add "extended-timestamp" extra block, because zip archives
190 # add "extended-timestamp" extra block, because zip archives
190 # without this will be extracted with unexpected timestamp,
191 # without this will be extracted with unexpected timestamp,
191 # if TZ is not configured as GMT
192 # if TZ is not configured as GMT
192 i.extra += struct.pack('<hhBl',
193 i.extra += struct.pack('<hhBl',
193 0x5455, # block type: "extended-timestamp"
194 0x5455, # block type: "extended-timestamp"
194 1 + 4, # size of this block
195 1 + 4, # size of this block
195 1, # "modification time is present"
196 1, # "modification time is present"
196 int(self.mtime)) # last modification (UTC)
197 int(self.mtime)) # last modification (UTC)
197 self.z.writestr(i, data)
198 self.z.writestr(i, data)
198
199
199 def done(self):
200 def done(self):
200 self.z.close()
201 self.z.close()
201
202
202 class fileit(object):
203 class fileit(object):
203 '''write archive as files in directory.'''
204 '''write archive as files in directory.'''
204
205
205 def __init__(self, name, mtime):
206 def __init__(self, name, mtime):
206 self.basedir = name
207 self.basedir = name
207 self.opener = scmutil.opener(self.basedir)
208 self.opener = scmutil.opener(self.basedir)
208
209
209 def addfile(self, name, mode, islink, data):
210 def addfile(self, name, mode, islink, data):
210 if islink:
211 if islink:
211 self.opener.symlink(data, name)
212 self.opener.symlink(data, name)
212 return
213 return
213 f = self.opener(name, "w", atomictemp=True)
214 f = self.opener(name, "w", atomictemp=True)
214 f.write(data)
215 f.write(data)
215 f.close()
216 f.close()
216 destfile = os.path.join(self.basedir, name)
217 destfile = os.path.join(self.basedir, name)
217 os.chmod(destfile, mode)
218 os.chmod(destfile, mode)
218
219
219 def done(self):
220 def done(self):
220 pass
221 pass
221
222
222 archivers = {
223 archivers = {
223 'files': fileit,
224 'files': fileit,
224 'tar': tarit,
225 'tar': tarit,
225 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
226 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
226 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
227 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
227 'uzip': lambda name, mtime: zipit(name, mtime, False),
228 'uzip': lambda name, mtime: zipit(name, mtime, False),
228 'zip': zipit,
229 'zip': zipit,
229 }
230 }
230
231
231 def archive(repo, dest, node, kind, decode=True, matchfn=None,
232 def archive(repo, dest, node, kind, decode=True, matchfn=None,
232 prefix=None, mtime=None, subrepos=False):
233 prefix=None, mtime=None, subrepos=False):
233 '''create archive of repo as it was at node.
234 '''create archive of repo as it was at node.
234
235
235 dest can be name of directory, name of archive file, or file
236 dest can be name of directory, name of archive file, or file
236 object to write archive to.
237 object to write archive to.
237
238
238 kind is type of archive to create.
239 kind is type of archive to create.
239
240
240 decode tells whether to put files through decode filters from
241 decode tells whether to put files through decode filters from
241 hgrc.
242 hgrc.
242
243
243 matchfn is function to filter names of files to write to archive.
244 matchfn is function to filter names of files to write to archive.
244
245
245 prefix is name of path to put before every archive member.'''
246 prefix is name of path to put before every archive member.'''
246
247
247 if kind == 'files':
248 if kind == 'files':
248 if prefix:
249 if prefix:
249 raise util.Abort(_('cannot give prefix when archiving to files'))
250 raise util.Abort(_('cannot give prefix when archiving to files'))
250 else:
251 else:
251 prefix = tidyprefix(dest, kind, prefix)
252 prefix = tidyprefix(dest, kind, prefix)
252
253
253 def write(name, mode, islink, getdata):
254 def write(name, mode, islink, getdata):
254 data = getdata()
255 data = getdata()
255 if decode:
256 if decode:
256 data = repo.wwritedata(name, data)
257 data = repo.wwritedata(name, data)
257 archiver.addfile(prefix + name, mode, islink, data)
258 archiver.addfile(prefix + name, mode, islink, data)
258
259
259 if kind not in archivers:
260 if kind not in archivers:
260 raise util.Abort(_("unknown archive type '%s'") % kind)
261 raise util.Abort(_("unknown archive type '%s'") % kind)
261
262
262 ctx = repo[node]
263 ctx = repo[node]
263 archiver = archivers[kind](dest, mtime or ctx.date()[0])
264 archiver = archivers[kind](dest, mtime or ctx.date()[0])
264
265
265 if repo.ui.configbool("ui", "archivemeta", True):
266 if repo.ui.configbool("ui", "archivemeta", True):
266 def metadata():
267 def metadata():
267 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
268 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
268 repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
269 repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
269
270
270 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
271 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
271 if repo.tagtype(t) == 'global')
272 if repo.tagtype(t) == 'global')
272 if not tags:
273 if not tags:
273 repo.ui.pushbuffer()
274 repo.ui.pushbuffer()
274 opts = {'template': '{latesttag}\n{latesttagdistance}',
275 opts = {'template': '{latesttag}\n{latesttagdistance}',
275 'style': '', 'patch': None, 'git': None}
276 'style': '', 'patch': None, 'git': None}
276 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
277 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
277 ltags, dist = repo.ui.popbuffer().split('\n')
278 ltags, dist = repo.ui.popbuffer().split('\n')
278 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
279 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
279 tags += 'latesttagdistance: %s\n' % dist
280 tags += 'latesttagdistance: %s\n' % dist
280
281
281 return base + tags
282 return base + tags
282
283
283 name = '.hg_archival.txt'
284 name = '.hg_archival.txt'
284 if not matchfn or matchfn(name):
285 if not matchfn or matchfn(name):
285 write(name, 0644, False, metadata)
286 write(name, 0644, False, metadata)
286
287
287 if matchfn:
288 if matchfn:
288 files = [f for f in ctx.manifest().keys() if matchfn(f)]
289 files = [f for f in ctx.manifest().keys() if matchfn(f)]
289 else:
290 else:
290 files = ctx.manifest().keys()
291 files = ctx.manifest().keys()
Old version (archival.py lines 291-307):
291 files.sort()
292 total = len(files)
293 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
294 for i, f in enumerate(files):
295 ff = ctx.flags(f)
296 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
297 repo.ui.progress(_('archiving'), i + 1, item=f,
298 unit=_('files'), total=total)
299 repo.ui.progress(_('archiving'), None)
300 
301 if subrepos:
302 for subpath in sorted(ctx.substate):
303 sub = ctx.sub(subpath)
304 submatch = matchmod.narrowmatcher(subpath, matchfn)
305 sub.archive(repo.ui, archiver, prefix, submatch)
306 
307 archiver.done()
New version (archival.py lines 292-313):
292 total = len(files)
293 if total:
294 files.sort()
295 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
296 for i, f in enumerate(files):
297 ff = ctx.flags(f)
298 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
299 repo.ui.progress(_('archiving'), i + 1, item=f,
300 unit=_('files'), total=total)
301 repo.ui.progress(_('archiving'), None)
302 
303 if subrepos:
304 for subpath in sorted(ctx.substate):
305 sub = ctx.sub(subpath)
306 submatch = matchmod.narrowmatcher(subpath, matchfn)
307 total += sub.archive(repo.ui, archiver, prefix, submatch)
308 
309 if total == 0:
310 raise error.Abort(_('no files match the archive pattern'))
311 
312 archiver.done()
313 return total
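Reduced to its counting logic, the hunk above sums the files matched in the outer repository with the count returned by each subrepo's archive() call, and aborts only when that grand total is zero. A standalone illustration of that aggregation (ValueError stands in for the error.Abort used by the real code; this is not code from the changeset):

# Illustrative reduction of the new counting logic in archive().
def archived_count(matched_files, subrepo_counts):
    total = len(matched_files)      # files matched in the outer repository
    total += sum(subrepo_counts)    # each sub.archive(...) now returns a count
    if total == 0:
        raise ValueError('no files match the archive pattern')
    return total

# archived_count([], [0, 0]) raises; archived_count([], [3]) returns 3.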
@@ -1,1446 +1,1451 @@
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import errno, os, re, xml.dom.minidom, shutil, posixpath, sys
8 import errno, os, re, xml.dom.minidom, shutil, posixpath, sys
9 import stat, subprocess, tarfile
9 import stat, subprocess, tarfile
10 from i18n import _
10 from i18n import _
11 import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
11 import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
12 hg = None
12 hg = None
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 nullstate = ('', '', 'empty')
15 nullstate = ('', '', 'empty')
16
16
17 def _expandedabspath(path):
17 def _expandedabspath(path):
18 '''
18 '''
19 get a path or url and if it is a path expand it and return an absolute path
19 get a path or url and if it is a path expand it and return an absolute path
20 '''
20 '''
21 expandedpath = util.urllocalpath(util.expandpath(path))
21 expandedpath = util.urllocalpath(util.expandpath(path))
22 u = util.url(expandedpath)
22 u = util.url(expandedpath)
23 if not u.scheme:
23 if not u.scheme:
24 path = util.normpath(os.path.abspath(u.path))
24 path = util.normpath(os.path.abspath(u.path))
25 return path
25 return path
26
26
27 def _getstorehashcachename(remotepath):
27 def _getstorehashcachename(remotepath):
28 '''get a unique filename for the store hash cache of a remote repository'''
28 '''get a unique filename for the store hash cache of a remote repository'''
29 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
29 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
30
30
31 def _calcfilehash(filename):
31 def _calcfilehash(filename):
32 data = ''
32 data = ''
33 if os.path.exists(filename):
33 if os.path.exists(filename):
34 fd = open(filename)
34 fd = open(filename)
35 data = fd.read()
35 data = fd.read()
36 fd.close()
36 fd.close()
37 return util.sha1(data).hexdigest()
37 return util.sha1(data).hexdigest()
38
38
39 class SubrepoAbort(error.Abort):
39 class SubrepoAbort(error.Abort):
40 """Exception class used to avoid handling a subrepo error more than once"""
40 """Exception class used to avoid handling a subrepo error more than once"""
41 def __init__(self, *args, **kw):
41 def __init__(self, *args, **kw):
42 error.Abort.__init__(self, *args, **kw)
42 error.Abort.__init__(self, *args, **kw)
43 self.subrepo = kw.get('subrepo')
43 self.subrepo = kw.get('subrepo')
44 self.cause = kw.get('cause')
44 self.cause = kw.get('cause')
45
45
46 def annotatesubrepoerror(func):
46 def annotatesubrepoerror(func):
47 def decoratedmethod(self, *args, **kargs):
47 def decoratedmethod(self, *args, **kargs):
48 try:
48 try:
49 res = func(self, *args, **kargs)
49 res = func(self, *args, **kargs)
50 except SubrepoAbort, ex:
50 except SubrepoAbort, ex:
51 # This exception has already been handled
51 # This exception has already been handled
52 raise ex
52 raise ex
53 except error.Abort, ex:
53 except error.Abort, ex:
54 subrepo = subrelpath(self)
54 subrepo = subrelpath(self)
55 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
55 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
56 # avoid handling this exception by raising a SubrepoAbort exception
56 # avoid handling this exception by raising a SubrepoAbort exception
57 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
57 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
58 cause=sys.exc_info())
58 cause=sys.exc_info())
59 return res
59 return res
60 return decoratedmethod
60 return decoratedmethod
61
61
62 def state(ctx, ui):
62 def state(ctx, ui):
63 """return a state dict, mapping subrepo paths configured in .hgsub
63 """return a state dict, mapping subrepo paths configured in .hgsub
64 to tuple: (source from .hgsub, revision from .hgsubstate, kind
64 to tuple: (source from .hgsub, revision from .hgsubstate, kind
65 (key in types dict))
65 (key in types dict))
66 """
66 """
67 p = config.config()
67 p = config.config()
68 def read(f, sections=None, remap=None):
68 def read(f, sections=None, remap=None):
69 if f in ctx:
69 if f in ctx:
70 try:
70 try:
71 data = ctx[f].data()
71 data = ctx[f].data()
72 except IOError, err:
72 except IOError, err:
73 if err.errno != errno.ENOENT:
73 if err.errno != errno.ENOENT:
74 raise
74 raise
75 # handle missing subrepo spec files as removed
75 # handle missing subrepo spec files as removed
76 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
76 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
77 return
77 return
78 p.parse(f, data, sections, remap, read)
78 p.parse(f, data, sections, remap, read)
79 else:
79 else:
80 raise util.Abort(_("subrepo spec file %s not found") % f)
80 raise util.Abort(_("subrepo spec file %s not found") % f)
81
81
82 if '.hgsub' in ctx:
82 if '.hgsub' in ctx:
83 read('.hgsub')
83 read('.hgsub')
84
84
85 for path, src in ui.configitems('subpaths'):
85 for path, src in ui.configitems('subpaths'):
86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
87
87
88 rev = {}
88 rev = {}
89 if '.hgsubstate' in ctx:
89 if '.hgsubstate' in ctx:
90 try:
90 try:
91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
92 l = l.lstrip()
92 l = l.lstrip()
93 if not l:
93 if not l:
94 continue
94 continue
95 try:
95 try:
96 revision, path = l.split(" ", 1)
96 revision, path = l.split(" ", 1)
97 except ValueError:
97 except ValueError:
98 raise util.Abort(_("invalid subrepository revision "
98 raise util.Abort(_("invalid subrepository revision "
99 "specifier in .hgsubstate line %d")
99 "specifier in .hgsubstate line %d")
100 % (i + 1))
100 % (i + 1))
101 rev[path] = revision
101 rev[path] = revision
102 except IOError, err:
102 except IOError, err:
103 if err.errno != errno.ENOENT:
103 if err.errno != errno.ENOENT:
104 raise
104 raise
105
105
106 def remap(src):
106 def remap(src):
107 for pattern, repl in p.items('subpaths'):
107 for pattern, repl in p.items('subpaths'):
108 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
108 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
109 # does a string decode.
109 # does a string decode.
110 repl = repl.encode('string-escape')
110 repl = repl.encode('string-escape')
111 # However, we still want to allow back references to go
111 # However, we still want to allow back references to go
112 # through unharmed, so we turn r'\\1' into r'\1'. Again,
112 # through unharmed, so we turn r'\\1' into r'\1'. Again,
113 # extra escapes are needed because re.sub string decodes.
113 # extra escapes are needed because re.sub string decodes.
114 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
114 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
115 try:
115 try:
116 src = re.sub(pattern, repl, src, 1)
116 src = re.sub(pattern, repl, src, 1)
117 except re.error, e:
117 except re.error, e:
118 raise util.Abort(_("bad subrepository pattern in %s: %s")
118 raise util.Abort(_("bad subrepository pattern in %s: %s")
119 % (p.source('subpaths', pattern), e))
119 % (p.source('subpaths', pattern), e))
120 return src
120 return src
121
121
122 state = {}
122 state = {}
123 for path, src in p[''].items():
123 for path, src in p[''].items():
124 kind = 'hg'
124 kind = 'hg'
125 if src.startswith('['):
125 if src.startswith('['):
126 if ']' not in src:
126 if ']' not in src:
127 raise util.Abort(_('missing ] in subrepo source'))
127 raise util.Abort(_('missing ] in subrepo source'))
128 kind, src = src.split(']', 1)
128 kind, src = src.split(']', 1)
129 kind = kind[1:]
129 kind = kind[1:]
130 src = src.lstrip() # strip any extra whitespace after ']'
130 src = src.lstrip() # strip any extra whitespace after ']'
131
131
132 if not util.url(src).isabs():
132 if not util.url(src).isabs():
133 parent = _abssource(ctx._repo, abort=False)
133 parent = _abssource(ctx._repo, abort=False)
134 if parent:
134 if parent:
135 parent = util.url(parent)
135 parent = util.url(parent)
136 parent.path = posixpath.join(parent.path or '', src)
136 parent.path = posixpath.join(parent.path or '', src)
137 parent.path = posixpath.normpath(parent.path)
137 parent.path = posixpath.normpath(parent.path)
138 joined = str(parent)
138 joined = str(parent)
139 # Remap the full joined path and use it if it changes,
139 # Remap the full joined path and use it if it changes,
140 # else remap the original source.
140 # else remap the original source.
141 remapped = remap(joined)
141 remapped = remap(joined)
142 if remapped == joined:
142 if remapped == joined:
143 src = remap(src)
143 src = remap(src)
144 else:
144 else:
145 src = remapped
145 src = remapped
146
146
147 src = remap(src)
147 src = remap(src)
148 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
148 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
149
149
150 return state
150 return state
151
151
152 def writestate(repo, state):
152 def writestate(repo, state):
153 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
153 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
154 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
154 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
155 repo.wwrite('.hgsubstate', ''.join(lines), '')
155 repo.wwrite('.hgsubstate', ''.join(lines), '')
156
156
157 def submerge(repo, wctx, mctx, actx, overwrite):
157 def submerge(repo, wctx, mctx, actx, overwrite):
158 """delegated from merge.applyupdates: merging of .hgsubstate file
158 """delegated from merge.applyupdates: merging of .hgsubstate file
159 in working context, merging context and ancestor context"""
159 in working context, merging context and ancestor context"""
160 if mctx == actx: # backwards?
160 if mctx == actx: # backwards?
161 actx = wctx.p1()
161 actx = wctx.p1()
162 s1 = wctx.substate
162 s1 = wctx.substate
163 s2 = mctx.substate
163 s2 = mctx.substate
164 sa = actx.substate
164 sa = actx.substate
165 sm = {}
165 sm = {}
166
166
167 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
167 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
168
168
169 def debug(s, msg, r=""):
169 def debug(s, msg, r=""):
170 if r:
170 if r:
171 r = "%s:%s:%s" % r
171 r = "%s:%s:%s" % r
172 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
172 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
173
173
174 for s, l in sorted(s1.iteritems()):
174 for s, l in sorted(s1.iteritems()):
175 a = sa.get(s, nullstate)
175 a = sa.get(s, nullstate)
176 ld = l # local state with possible dirty flag for compares
176 ld = l # local state with possible dirty flag for compares
177 if wctx.sub(s).dirty():
177 if wctx.sub(s).dirty():
178 ld = (l[0], l[1] + "+")
178 ld = (l[0], l[1] + "+")
179 if wctx == actx: # overwrite
179 if wctx == actx: # overwrite
180 a = ld
180 a = ld
181
181
182 if s in s2:
182 if s in s2:
183 r = s2[s]
183 r = s2[s]
184 if ld == r or r == a: # no change or local is newer
184 if ld == r or r == a: # no change or local is newer
185 sm[s] = l
185 sm[s] = l
186 continue
186 continue
187 elif ld == a: # other side changed
187 elif ld == a: # other side changed
188 debug(s, "other changed, get", r)
188 debug(s, "other changed, get", r)
189 wctx.sub(s).get(r, overwrite)
189 wctx.sub(s).get(r, overwrite)
190 sm[s] = r
190 sm[s] = r
191 elif ld[0] != r[0]: # sources differ
191 elif ld[0] != r[0]: # sources differ
192 if repo.ui.promptchoice(
192 if repo.ui.promptchoice(
193 _(' subrepository sources for %s differ\n'
193 _(' subrepository sources for %s differ\n'
194 'use (l)ocal source (%s) or (r)emote source (%s)?')
194 'use (l)ocal source (%s) or (r)emote source (%s)?')
195 % (s, l[0], r[0]),
195 % (s, l[0], r[0]),
196 (_('&Local'), _('&Remote')), 0):
196 (_('&Local'), _('&Remote')), 0):
197 debug(s, "prompt changed, get", r)
197 debug(s, "prompt changed, get", r)
198 wctx.sub(s).get(r, overwrite)
198 wctx.sub(s).get(r, overwrite)
199 sm[s] = r
199 sm[s] = r
200 elif ld[1] == a[1]: # local side is unchanged
200 elif ld[1] == a[1]: # local side is unchanged
201 debug(s, "other side changed, get", r)
201 debug(s, "other side changed, get", r)
202 wctx.sub(s).get(r, overwrite)
202 wctx.sub(s).get(r, overwrite)
203 sm[s] = r
203 sm[s] = r
204 else:
204 else:
205 debug(s, "both sides changed, merge with", r)
205 debug(s, "both sides changed, merge with", r)
206 wctx.sub(s).merge(r)
206 wctx.sub(s).merge(r)
207 sm[s] = l
207 sm[s] = l
208 elif ld == a: # remote removed, local unchanged
208 elif ld == a: # remote removed, local unchanged
209 debug(s, "remote removed, remove")
209 debug(s, "remote removed, remove")
210 wctx.sub(s).remove()
210 wctx.sub(s).remove()
211 elif a == nullstate: # not present in remote or ancestor
211 elif a == nullstate: # not present in remote or ancestor
212 debug(s, "local added, keep")
212 debug(s, "local added, keep")
213 sm[s] = l
213 sm[s] = l
214 continue
214 continue
215 else:
215 else:
216 if repo.ui.promptchoice(
216 if repo.ui.promptchoice(
217 _(' local changed subrepository %s which remote removed\n'
217 _(' local changed subrepository %s which remote removed\n'
218 'use (c)hanged version or (d)elete?') % s,
218 'use (c)hanged version or (d)elete?') % s,
219 (_('&Changed'), _('&Delete')), 0):
219 (_('&Changed'), _('&Delete')), 0):
220 debug(s, "prompt remove")
220 debug(s, "prompt remove")
221 wctx.sub(s).remove()
221 wctx.sub(s).remove()
222
222
223 for s, r in sorted(s2.items()):
223 for s, r in sorted(s2.items()):
224 if s in s1:
224 if s in s1:
225 continue
225 continue
226 elif s not in sa:
226 elif s not in sa:
227 debug(s, "remote added, get", r)
227 debug(s, "remote added, get", r)
228 mctx.sub(s).get(r)
228 mctx.sub(s).get(r)
229 sm[s] = r
229 sm[s] = r
230 elif r != sa[s]:
230 elif r != sa[s]:
231 if repo.ui.promptchoice(
231 if repo.ui.promptchoice(
232 _(' remote changed subrepository %s which local removed\n'
232 _(' remote changed subrepository %s which local removed\n'
233 'use (c)hanged version or (d)elete?') % s,
233 'use (c)hanged version or (d)elete?') % s,
234 (_('&Changed'), _('&Delete')), 0) == 0:
234 (_('&Changed'), _('&Delete')), 0) == 0:
235 debug(s, "prompt recreate", r)
235 debug(s, "prompt recreate", r)
236 wctx.sub(s).get(r)
236 wctx.sub(s).get(r)
237 sm[s] = r
237 sm[s] = r
238
238
239 # record merged .hgsubstate
239 # record merged .hgsubstate
240 writestate(repo, sm)
240 writestate(repo, sm)
241
241
242 def _updateprompt(ui, sub, dirty, local, remote):
242 def _updateprompt(ui, sub, dirty, local, remote):
243 if dirty:
243 if dirty:
244 msg = (_(' subrepository sources for %s differ\n'
244 msg = (_(' subrepository sources for %s differ\n'
245 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
245 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
246 % (subrelpath(sub), local, remote))
246 % (subrelpath(sub), local, remote))
247 else:
247 else:
248 msg = (_(' subrepository sources for %s differ (in checked out '
248 msg = (_(' subrepository sources for %s differ (in checked out '
249 'version)\n'
249 'version)\n'
250 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
250 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
251 % (subrelpath(sub), local, remote))
251 % (subrelpath(sub), local, remote))
252 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
252 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
253
253
254 def reporelpath(repo):
254 def reporelpath(repo):
255 """return path to this (sub)repo as seen from outermost repo"""
255 """return path to this (sub)repo as seen from outermost repo"""
256 parent = repo
256 parent = repo
257 while util.safehasattr(parent, '_subparent'):
257 while util.safehasattr(parent, '_subparent'):
258 parent = parent._subparent
258 parent = parent._subparent
259 p = parent.root.rstrip(os.sep)
259 p = parent.root.rstrip(os.sep)
260 return repo.root[len(p) + 1:]
260 return repo.root[len(p) + 1:]
261
261
262 def subrelpath(sub):
262 def subrelpath(sub):
263 """return path to this subrepo as seen from outermost repo"""
263 """return path to this subrepo as seen from outermost repo"""
264 if util.safehasattr(sub, '_relpath'):
264 if util.safehasattr(sub, '_relpath'):
265 return sub._relpath
265 return sub._relpath
266 if not util.safehasattr(sub, '_repo'):
266 if not util.safehasattr(sub, '_repo'):
267 return sub._path
267 return sub._path
268 return reporelpath(sub._repo)
268 return reporelpath(sub._repo)
269
269
270 def _abssource(repo, push=False, abort=True):
270 def _abssource(repo, push=False, abort=True):
271 """return pull/push path of repo - either based on parent repo .hgsub info
271 """return pull/push path of repo - either based on parent repo .hgsub info
272 or on the top repo config. Abort or return None if no source found."""
272 or on the top repo config. Abort or return None if no source found."""
273 if util.safehasattr(repo, '_subparent'):
273 if util.safehasattr(repo, '_subparent'):
274 source = util.url(repo._subsource)
274 source = util.url(repo._subsource)
275 if source.isabs():
275 if source.isabs():
276 return str(source)
276 return str(source)
277 source.path = posixpath.normpath(source.path)
277 source.path = posixpath.normpath(source.path)
278 parent = _abssource(repo._subparent, push, abort=False)
278 parent = _abssource(repo._subparent, push, abort=False)
279 if parent:
279 if parent:
280 parent = util.url(util.pconvert(parent))
280 parent = util.url(util.pconvert(parent))
281 parent.path = posixpath.join(parent.path or '', source.path)
281 parent.path = posixpath.join(parent.path or '', source.path)
282 parent.path = posixpath.normpath(parent.path)
282 parent.path = posixpath.normpath(parent.path)
283 return str(parent)
283 return str(parent)
284 else: # recursion reached top repo
284 else: # recursion reached top repo
285 if util.safehasattr(repo, '_subtoppath'):
285 if util.safehasattr(repo, '_subtoppath'):
286 return repo._subtoppath
286 return repo._subtoppath
287 if push and repo.ui.config('paths', 'default-push'):
287 if push and repo.ui.config('paths', 'default-push'):
288 return repo.ui.config('paths', 'default-push')
288 return repo.ui.config('paths', 'default-push')
289 if repo.ui.config('paths', 'default'):
289 if repo.ui.config('paths', 'default'):
290 return repo.ui.config('paths', 'default')
290 return repo.ui.config('paths', 'default')
291 if repo.sharedpath != repo.path:
291 if repo.sharedpath != repo.path:
292 # chop off the .hg component to get the default path form
292 # chop off the .hg component to get the default path form
293 return os.path.dirname(repo.sharedpath)
293 return os.path.dirname(repo.sharedpath)
294 if abort:
294 if abort:
295 raise util.Abort(_("default path for subrepository not found"))
295 raise util.Abort(_("default path for subrepository not found"))
296
296
297 def itersubrepos(ctx1, ctx2):
297 def itersubrepos(ctx1, ctx2):
298 """find subrepos in ctx1 or ctx2"""
298 """find subrepos in ctx1 or ctx2"""
299 # Create a (subpath, ctx) mapping where we prefer subpaths from
299 # Create a (subpath, ctx) mapping where we prefer subpaths from
300 # ctx1. The subpaths from ctx2 are important when the .hgsub file
300 # ctx1. The subpaths from ctx2 are important when the .hgsub file
301 # has been modified (in ctx2) but not yet committed (in ctx1).
301 # has been modified (in ctx2) but not yet committed (in ctx1).
302 subpaths = dict.fromkeys(ctx2.substate, ctx2)
302 subpaths = dict.fromkeys(ctx2.substate, ctx2)
303 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
303 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
304 for subpath, ctx in sorted(subpaths.iteritems()):
304 for subpath, ctx in sorted(subpaths.iteritems()):
305 yield subpath, ctx.sub(subpath)
305 yield subpath, ctx.sub(subpath)
306
306
307 def subrepo(ctx, path):
307 def subrepo(ctx, path):
308 """return instance of the right subrepo class for subrepo in path"""
308 """return instance of the right subrepo class for subrepo in path"""
309 # subrepo inherently violates our import layering rules
309 # subrepo inherently violates our import layering rules
310 # because it wants to make repo objects from deep inside the stack
310 # because it wants to make repo objects from deep inside the stack
311 # so we manually delay the circular imports to not break
311 # so we manually delay the circular imports to not break
312 # scripts that don't use our demand-loading
312 # scripts that don't use our demand-loading
313 global hg
313 global hg
314 import hg as h
314 import hg as h
315 hg = h
315 hg = h
316
316
317 scmutil.pathauditor(ctx._repo.root)(path)
317 scmutil.pathauditor(ctx._repo.root)(path)
318 state = ctx.substate[path]
318 state = ctx.substate[path]
319 if state[2] not in types:
319 if state[2] not in types:
320 raise util.Abort(_('unknown subrepo type %s') % state[2])
320 raise util.Abort(_('unknown subrepo type %s') % state[2])
321 return types[state[2]](ctx, path, state[:2])
321 return types[state[2]](ctx, path, state[:2])
322
322
323 # subrepo classes need to implement the following abstract class:
323 # subrepo classes need to implement the following abstract class:
324
324
325 class abstractsubrepo(object):
325 class abstractsubrepo(object):
326
326
327 def storeclean(self, path):
327 def storeclean(self, path):
328 """
328 """
329 returns true if the repository has not changed since it was last
329 returns true if the repository has not changed since it was last
330 cloned from or pushed to a given repository.
330 cloned from or pushed to a given repository.
331 """
331 """
332 return False
332 return False
333
333
334 def dirty(self, ignoreupdate=False):
334 def dirty(self, ignoreupdate=False):
335 """returns true if the dirstate of the subrepo is dirty or does not
335 """returns true if the dirstate of the subrepo is dirty or does not
336 match current stored state. If ignoreupdate is true, only check
336 match current stored state. If ignoreupdate is true, only check
337 whether the subrepo has uncommitted changes in its dirstate.
337 whether the subrepo has uncommitted changes in its dirstate.
338 """
338 """
339 raise NotImplementedError
339 raise NotImplementedError
340
340
341 def basestate(self):
341 def basestate(self):
342 """current working directory base state, disregarding .hgsubstate
342 """current working directory base state, disregarding .hgsubstate
343 state and working directory modifications"""
343 state and working directory modifications"""
344 raise NotImplementedError
344 raise NotImplementedError
345
345
346 def checknested(self, path):
346 def checknested(self, path):
347 """check if path is a subrepository within this repository"""
347 """check if path is a subrepository within this repository"""
348 return False
348 return False
349
349
350 def commit(self, text, user, date):
350 def commit(self, text, user, date):
351 """commit the current changes to the subrepo with the given
351 """commit the current changes to the subrepo with the given
352 log message. Use given user and date if possible. Return the
352 log message. Use given user and date if possible. Return the
353 new state of the subrepo.
353 new state of the subrepo.
354 """
354 """
355 raise NotImplementedError
355 raise NotImplementedError
356
356
357 def remove(self):
357 def remove(self):
358 """remove the subrepo
358 """remove the subrepo
359
359
360 (should verify the dirstate is not dirty first)
360 (should verify the dirstate is not dirty first)
361 """
361 """
362 raise NotImplementedError
362 raise NotImplementedError
363
363
364 def get(self, state, overwrite=False):
364 def get(self, state, overwrite=False):
365 """run whatever commands are needed to put the subrepo into
365 """run whatever commands are needed to put the subrepo into
366 this state
366 this state
367 """
367 """
368 raise NotImplementedError
368 raise NotImplementedError
369
369
370 def merge(self, state):
370 def merge(self, state):
371 """merge currently-saved state with the new state."""
371 """merge currently-saved state with the new state."""
372 raise NotImplementedError
372 raise NotImplementedError
373
373
374 def push(self, opts):
374 def push(self, opts):
375 """perform whatever action is analogous to 'hg push'
375 """perform whatever action is analogous to 'hg push'
376
376
377 This may be a no-op on some systems.
377 This may be a no-op on some systems.
378 """
378 """
379 raise NotImplementedError
379 raise NotImplementedError
380
380
381 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
381 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
382 return []
382 return []
383
383
384 def status(self, rev2, **opts):
384 def status(self, rev2, **opts):
385 return [], [], [], [], [], [], []
385 return [], [], [], [], [], [], []
386
386
387 def diff(self, ui, diffopts, node2, match, prefix, **opts):
387 def diff(self, ui, diffopts, node2, match, prefix, **opts):
388 pass
388 pass
389
389
390 def outgoing(self, ui, dest, opts):
390 def outgoing(self, ui, dest, opts):
391 return 1
391 return 1
392
392
393 def incoming(self, ui, source, opts):
393 def incoming(self, ui, source, opts):
394 return 1
394 return 1
395
395
396 def files(self):
396 def files(self):
397 """return filename iterator"""
397 """return filename iterator"""
398 raise NotImplementedError
398 raise NotImplementedError
399
399
400 def filedata(self, name):
400 def filedata(self, name):
401 """return file data"""
401 """return file data"""
402 raise NotImplementedError
402 raise NotImplementedError
403
403
404 def fileflags(self, name):
404 def fileflags(self, name):
405 """return file flags"""
405 """return file flags"""
406 return ''
406 return ''
407
407
408 def archive(self, ui, archiver, prefix, match=None):
408 def archive(self, ui, archiver, prefix, match=None):
409 if match is not None:
409 if match is not None:
410 files = [f for f in self.files() if match(f)]
410 files = [f for f in self.files() if match(f)]
411 else:
411 else:
412 files = self.files()
412 files = self.files()
413 total = len(files)
413 total = len(files)
414 relpath = subrelpath(self)
414 relpath = subrelpath(self)
415 ui.progress(_('archiving (%s)') % relpath, 0,
415 ui.progress(_('archiving (%s)') % relpath, 0,
416 unit=_('files'), total=total)
416 unit=_('files'), total=total)
417 for i, name in enumerate(files):
417 for i, name in enumerate(files):
418 flags = self.fileflags(name)
418 flags = self.fileflags(name)
419 mode = 'x' in flags and 0755 or 0644
419 mode = 'x' in flags and 0755 or 0644
420 symlink = 'l' in flags
420 symlink = 'l' in flags
421 archiver.addfile(os.path.join(prefix, self._path, name),
421 archiver.addfile(os.path.join(prefix, self._path, name),
422 mode, symlink, self.filedata(name))
422 mode, symlink, self.filedata(name))
423 ui.progress(_('archiving (%s)') % relpath, i + 1,
423 ui.progress(_('archiving (%s)') % relpath, i + 1,
424 unit=_('files'), total=total)
424 unit=_('files'), total=total)
425 ui.progress(_('archiving (%s)') % relpath, None)
425 ui.progress(_('archiving (%s)') % relpath, None)
426 return total
426
427
427 def walk(self, match):
428 def walk(self, match):
428 '''
429 '''
429 walk recursively through the directory tree, finding all files
430 walk recursively through the directory tree, finding all files
430 matched by the match function
431 matched by the match function
431 '''
432 '''
432 pass
433 pass
433
434
434 def forget(self, ui, match, prefix):
435 def forget(self, ui, match, prefix):
435 return ([], [])
436 return ([], [])
436
437
437 def revert(self, ui, substate, *pats, **opts):
438 def revert(self, ui, substate, *pats, **opts):
438 ui.warn('%s: reverting %s subrepos is unsupported\n' \
439 ui.warn('%s: reverting %s subrepos is unsupported\n' \
439 % (substate[0], substate[2]))
440 % (substate[0], substate[2]))
440 return []
441 return []
441
442
442 class hgsubrepo(abstractsubrepo):
443 class hgsubrepo(abstractsubrepo):
443 def __init__(self, ctx, path, state):
444 def __init__(self, ctx, path, state):
444 self._path = path
445 self._path = path
445 self._state = state
446 self._state = state
446 r = ctx._repo
447 r = ctx._repo
447 root = r.wjoin(path)
448 root = r.wjoin(path)
448 create = False
449 create = False
449 if not os.path.exists(os.path.join(root, '.hg')):
450 if not os.path.exists(os.path.join(root, '.hg')):
450 create = True
451 create = True
451 util.makedirs(root)
452 util.makedirs(root)
452 self._repo = hg.repository(r.baseui, root, create=create)
453 self._repo = hg.repository(r.baseui, root, create=create)
453 for s, k in [('ui', 'commitsubrepos')]:
454 for s, k in [('ui', 'commitsubrepos')]:
454 v = r.ui.config(s, k)
455 v = r.ui.config(s, k)
455 if v:
456 if v:
456 self._repo.ui.setconfig(s, k, v)
457 self._repo.ui.setconfig(s, k, v)
457 self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
458 self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
458 self._initrepo(r, state[0], create)
459 self._initrepo(r, state[0], create)
459
460
460 def storeclean(self, path):
461 def storeclean(self, path):
461 clean = True
462 clean = True
462 lock = self._repo.lock()
463 lock = self._repo.lock()
463 itercache = self._calcstorehash(path)
464 itercache = self._calcstorehash(path)
464 try:
465 try:
465 for filehash in self._readstorehashcache(path):
466 for filehash in self._readstorehashcache(path):
466 if filehash != itercache.next():
467 if filehash != itercache.next():
467 clean = False
468 clean = False
468 break
469 break
469 except StopIteration:
470 except StopIteration:
470 # the cached and current pull states have a different size
471 # the cached and current pull states have a different size
471 clean = False
472 clean = False
472 if clean:
473 if clean:
473 try:
474 try:
474 itercache.next()
475 itercache.next()
475 # the cached and current pull states have a different size
476 # the cached and current pull states have a different size
476 clean = False
477 clean = False
477 except StopIteration:
478 except StopIteration:
478 pass
479 pass
479 lock.release()
480 lock.release()
480 return clean
481 return clean
481
482
482 def _calcstorehash(self, remotepath):
483 def _calcstorehash(self, remotepath):
483 '''calculate a unique "store hash"
484 '''calculate a unique "store hash"
484
485
485 This method is used to to detect when there are changes that may
486 This method is used to to detect when there are changes that may
486 require a push to a given remote path.'''
487 require a push to a given remote path.'''
487 # sort the files that will be hashed in increasing (likely) file size
488 # sort the files that will be hashed in increasing (likely) file size
488 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
489 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
489 yield '# %s\n' % _expandedabspath(remotepath)
490 yield '# %s\n' % _expandedabspath(remotepath)
490 for relname in filelist:
491 for relname in filelist:
491 absname = os.path.normpath(self._repo.join(relname))
492 absname = os.path.normpath(self._repo.join(relname))
492 yield '%s = %s\n' % (relname, _calcfilehash(absname))
493 yield '%s = %s\n' % (relname, _calcfilehash(absname))
493
494
494 def _getstorehashcachepath(self, remotepath):
495 def _getstorehashcachepath(self, remotepath):
495 '''get a unique path for the store hash cache'''
496 '''get a unique path for the store hash cache'''
496 return self._repo.join(os.path.join(
497 return self._repo.join(os.path.join(
497 'cache', 'storehash', _getstorehashcachename(remotepath)))
498 'cache', 'storehash', _getstorehashcachename(remotepath)))
498
499
499 def _readstorehashcache(self, remotepath):
500 def _readstorehashcache(self, remotepath):
500 '''read the store hash cache for a given remote repository'''
501 '''read the store hash cache for a given remote repository'''
501 cachefile = self._getstorehashcachepath(remotepath)
502 cachefile = self._getstorehashcachepath(remotepath)
502 if not os.path.exists(cachefile):
503 if not os.path.exists(cachefile):
503 return ''
504 return ''
504 fd = open(cachefile, 'r')
505 fd = open(cachefile, 'r')
505 pullstate = fd.readlines()
506 pullstate = fd.readlines()
506 fd.close()
507 fd.close()
507 return pullstate
508 return pullstate
508
509
509 def _cachestorehash(self, remotepath):
510 def _cachestorehash(self, remotepath):
510 '''cache the current store hash
511 '''cache the current store hash
511
512
512 Each remote repo requires its own store hash cache, because a subrepo
513 Each remote repo requires its own store hash cache, because a subrepo
513 store may be "clean" versus a given remote repo, but not versus another
514 store may be "clean" versus a given remote repo, but not versus another
514 '''
515 '''
515 cachefile = self._getstorehashcachepath(remotepath)
516 cachefile = self._getstorehashcachepath(remotepath)
516 lock = self._repo.lock()
517 lock = self._repo.lock()
517 storehash = list(self._calcstorehash(remotepath))
518 storehash = list(self._calcstorehash(remotepath))
518 cachedir = os.path.dirname(cachefile)
519 cachedir = os.path.dirname(cachefile)
519 if not os.path.exists(cachedir):
520 if not os.path.exists(cachedir):
520 util.makedirs(cachedir, notindexed=True)
521 util.makedirs(cachedir, notindexed=True)
521 fd = open(cachefile, 'w')
522 fd = open(cachefile, 'w')
522 fd.writelines(storehash)
523 fd.writelines(storehash)
523 fd.close()
524 fd.close()
524 lock.release()
525 lock.release()
525
526
526 @annotatesubrepoerror
527 @annotatesubrepoerror
527 def _initrepo(self, parentrepo, source, create):
528 def _initrepo(self, parentrepo, source, create):
528 self._repo._subparent = parentrepo
529 self._repo._subparent = parentrepo
529 self._repo._subsource = source
530 self._repo._subsource = source
530
531
531 if create:
532 if create:
532 fp = self._repo.opener("hgrc", "w", text=True)
533 fp = self._repo.opener("hgrc", "w", text=True)
533 fp.write('[paths]\n')
534 fp.write('[paths]\n')
534
535
535 def addpathconfig(key, value):
536 def addpathconfig(key, value):
536 if value:
537 if value:
537 fp.write('%s = %s\n' % (key, value))
538 fp.write('%s = %s\n' % (key, value))
538 self._repo.ui.setconfig('paths', key, value)
539 self._repo.ui.setconfig('paths', key, value)
539
540
540 defpath = _abssource(self._repo, abort=False)
541 defpath = _abssource(self._repo, abort=False)
541 defpushpath = _abssource(self._repo, True, abort=False)
542 defpushpath = _abssource(self._repo, True, abort=False)
542 addpathconfig('default', defpath)
543 addpathconfig('default', defpath)
543 if defpath != defpushpath:
544 if defpath != defpushpath:
544 addpathconfig('default-push', defpushpath)
545 addpathconfig('default-push', defpushpath)
545 fp.close()
546 fp.close()
546
547
547 @annotatesubrepoerror
548 @annotatesubrepoerror
548 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
549 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
549 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
550 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
550 os.path.join(prefix, self._path), explicitonly)
551 os.path.join(prefix, self._path), explicitonly)
551
552
552 @annotatesubrepoerror
553 @annotatesubrepoerror
553 def status(self, rev2, **opts):
554 def status(self, rev2, **opts):
554 try:
555 try:
555 rev1 = self._state[1]
556 rev1 = self._state[1]
556 ctx1 = self._repo[rev1]
557 ctx1 = self._repo[rev1]
557 ctx2 = self._repo[rev2]
558 ctx2 = self._repo[rev2]
558 return self._repo.status(ctx1, ctx2, **opts)
559 return self._repo.status(ctx1, ctx2, **opts)
559 except error.RepoLookupError, inst:
560 except error.RepoLookupError, inst:
560 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
561 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
561 % (inst, subrelpath(self)))
562 % (inst, subrelpath(self)))
562 return [], [], [], [], [], [], []
563 return [], [], [], [], [], [], []
563
564
564 @annotatesubrepoerror
565 @annotatesubrepoerror
565 def diff(self, ui, diffopts, node2, match, prefix, **opts):
566 def diff(self, ui, diffopts, node2, match, prefix, **opts):
566 try:
567 try:
567 node1 = node.bin(self._state[1])
568 node1 = node.bin(self._state[1])
568 # We currently expect node2 to come from substate and be
569 # We currently expect node2 to come from substate and be
569 # in hex format
570 # in hex format
570 if node2 is not None:
571 if node2 is not None:
571 node2 = node.bin(node2)
572 node2 = node.bin(node2)
572 cmdutil.diffordiffstat(ui, self._repo, diffopts,
573 cmdutil.diffordiffstat(ui, self._repo, diffopts,
573 node1, node2, match,
574 node1, node2, match,
574 prefix=posixpath.join(prefix, self._path),
575 prefix=posixpath.join(prefix, self._path),
575 listsubrepos=True, **opts)
576 listsubrepos=True, **opts)
576 except error.RepoLookupError, inst:
577 except error.RepoLookupError, inst:
577 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
578 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
578 % (inst, subrelpath(self)))
579 % (inst, subrelpath(self)))
579
580
580 @annotatesubrepoerror
581 @annotatesubrepoerror
581 def archive(self, ui, archiver, prefix, match=None):
582 def archive(self, ui, archiver, prefix, match=None):
582 self._get(self._state + ('hg',))
583 self._get(self._state + ('hg',))
Old version (subrepo.py lines 583-591):
583 abstractsubrepo.archive(self, ui, archiver, prefix, match)
584 
585 rev = self._state[1]
586 ctx = self._repo[rev]
587 for subpath in ctx.substate:
588 s = subrepo(ctx, subpath)
589 submatch = matchmod.narrowmatcher(subpath, match)
590 s.archive(ui, archiver, os.path.join(prefix, self._path), submatch)
591 
New version (subrepo.py lines 584-593):
584 total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
585 rev = self._state[1]
586 ctx = self._repo[rev]
587 for subpath in ctx.substate:
588 s = subrepo(ctx, subpath)
589 submatch = matchmod.narrowmatcher(subpath, match)
590 total += s.archive(
591 ui, archiver, os.path.join(prefix, self._path), submatch)
592 return total
593 
592 @annotatesubrepoerror
594 @annotatesubrepoerror
593 def dirty(self, ignoreupdate=False):
595 def dirty(self, ignoreupdate=False):
594 r = self._state[1]
596 r = self._state[1]
595 if r == '' and not ignoreupdate: # no state recorded
597 if r == '' and not ignoreupdate: # no state recorded
596 return True
598 return True
597 w = self._repo[None]
599 w = self._repo[None]
598 if r != w.p1().hex() and not ignoreupdate:
600 if r != w.p1().hex() and not ignoreupdate:
599 # different version checked out
601 # different version checked out
600 return True
602 return True
601 return w.dirty() # working directory changed
603 return w.dirty() # working directory changed
602
604
603 def basestate(self):
605 def basestate(self):
604 return self._repo['.'].hex()
606 return self._repo['.'].hex()
605
607
606 def checknested(self, path):
608 def checknested(self, path):
607 return self._repo._checknested(self._repo.wjoin(path))
609 return self._repo._checknested(self._repo.wjoin(path))
608
610
609 @annotatesubrepoerror
611 @annotatesubrepoerror
610 def commit(self, text, user, date):
612 def commit(self, text, user, date):
611 # don't bother committing in the subrepo if it's only been
613 # don't bother committing in the subrepo if it's only been
612 # updated
614 # updated
613 if not self.dirty(True):
615 if not self.dirty(True):
614 return self._repo['.'].hex()
616 return self._repo['.'].hex()
615 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
617 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
616 n = self._repo.commit(text, user, date)
618 n = self._repo.commit(text, user, date)
617 if not n:
619 if not n:
618 return self._repo['.'].hex() # different version checked out
620 return self._repo['.'].hex() # different version checked out
619 return node.hex(n)
621 return node.hex(n)
620
622
621 @annotatesubrepoerror
623 @annotatesubrepoerror
622 def remove(self):
624 def remove(self):
623 # we can't fully delete the repository as it may contain
625 # we can't fully delete the repository as it may contain
624 # local-only history
626 # local-only history
625 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
627 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
626 hg.clean(self._repo, node.nullid, False)
628 hg.clean(self._repo, node.nullid, False)
627
629
628 def _get(self, state):
630 def _get(self, state):
629 source, revision, kind = state
631 source, revision, kind = state
630 if revision not in self._repo:
632 if revision not in self._repo:
631 self._repo._subsource = source
633 self._repo._subsource = source
632 srcurl = _abssource(self._repo)
634 srcurl = _abssource(self._repo)
633 other = hg.peer(self._repo, {}, srcurl)
635 other = hg.peer(self._repo, {}, srcurl)
634 if len(self._repo) == 0:
636 if len(self._repo) == 0:
635 self._repo.ui.status(_('cloning subrepo %s from %s\n')
637 self._repo.ui.status(_('cloning subrepo %s from %s\n')
636 % (subrelpath(self), srcurl))
638 % (subrelpath(self), srcurl))
637 parentrepo = self._repo._subparent
639 parentrepo = self._repo._subparent
638 shutil.rmtree(self._repo.path)
640 shutil.rmtree(self._repo.path)
639 other, cloned = hg.clone(self._repo._subparent.baseui, {},
641 other, cloned = hg.clone(self._repo._subparent.baseui, {},
640 other, self._repo.root,
642 other, self._repo.root,
641 update=False)
643 update=False)
642 self._repo = cloned.local()
644 self._repo = cloned.local()
643 self._initrepo(parentrepo, source, create=True)
645 self._initrepo(parentrepo, source, create=True)
644 self._cachestorehash(srcurl)
646 self._cachestorehash(srcurl)
645 else:
647 else:
646 self._repo.ui.status(_('pulling subrepo %s from %s\n')
648 self._repo.ui.status(_('pulling subrepo %s from %s\n')
647 % (subrelpath(self), srcurl))
649 % (subrelpath(self), srcurl))
648 cleansub = self.storeclean(srcurl)
650 cleansub = self.storeclean(srcurl)
649 remotebookmarks = other.listkeys('bookmarks')
651 remotebookmarks = other.listkeys('bookmarks')
650 self._repo.pull(other)
652 self._repo.pull(other)
651 bookmarks.updatefromremote(self._repo.ui, self._repo,
653 bookmarks.updatefromremote(self._repo.ui, self._repo,
652 remotebookmarks, srcurl)
654 remotebookmarks, srcurl)
653 if cleansub:
655 if cleansub:
654 # keep the repo clean after pull
656 # keep the repo clean after pull
655 self._cachestorehash(srcurl)
657 self._cachestorehash(srcurl)
656
658
657 @annotatesubrepoerror
659 @annotatesubrepoerror
658 def get(self, state, overwrite=False):
660 def get(self, state, overwrite=False):
659 self._get(state)
661 self._get(state)
660 source, revision, kind = state
662 source, revision, kind = state
661 self._repo.ui.debug("getting subrepo %s\n" % self._path)
663 self._repo.ui.debug("getting subrepo %s\n" % self._path)
662 hg.updaterepo(self._repo, revision, overwrite)
664 hg.updaterepo(self._repo, revision, overwrite)
663
665
664 @annotatesubrepoerror
666 @annotatesubrepoerror
665 def merge(self, state):
667 def merge(self, state):
666 self._get(state)
668 self._get(state)
667 cur = self._repo['.']
669 cur = self._repo['.']
668 dst = self._repo[state[1]]
670 dst = self._repo[state[1]]
669 anc = dst.ancestor(cur)
671 anc = dst.ancestor(cur)
670
672
671 def mergefunc():
673 def mergefunc():
672 if anc == cur and dst.branch() == cur.branch():
674 if anc == cur and dst.branch() == cur.branch():
673 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
675 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
674 hg.update(self._repo, state[1])
676 hg.update(self._repo, state[1])
675 elif anc == dst:
677 elif anc == dst:
676 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
678 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
677 else:
679 else:
678 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
680 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
679 hg.merge(self._repo, state[1], remind=False)
681 hg.merge(self._repo, state[1], remind=False)
680
682
681 wctx = self._repo[None]
683 wctx = self._repo[None]
682 if self.dirty():
684 if self.dirty():
683 if anc != dst:
685 if anc != dst:
684 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
686 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
685 mergefunc()
687 mergefunc()
686 else:
688 else:
687 mergefunc()
689 mergefunc()
688 else:
690 else:
689 mergefunc()
691 mergefunc()
690
692
691 @annotatesubrepoerror
693 @annotatesubrepoerror
692 def push(self, opts):
694 def push(self, opts):
693 force = opts.get('force')
695 force = opts.get('force')
694 newbranch = opts.get('new_branch')
696 newbranch = opts.get('new_branch')
695 ssh = opts.get('ssh')
697 ssh = opts.get('ssh')
696
698
697 # push subrepos depth-first for coherent ordering
699 # push subrepos depth-first for coherent ordering
698 c = self._repo['']
700 c = self._repo['']
699 subs = c.substate # only repos that are committed
701 subs = c.substate # only repos that are committed
700 for s in sorted(subs):
702 for s in sorted(subs):
701 if c.sub(s).push(opts) == 0:
703 if c.sub(s).push(opts) == 0:
702 return False
704 return False
703
705
704 dsturl = _abssource(self._repo, True)
706 dsturl = _abssource(self._repo, True)
705 if not force:
707 if not force:
706 if self.storeclean(dsturl):
708 if self.storeclean(dsturl):
707 self._repo.ui.status(
709 self._repo.ui.status(
708 _('no changes made to subrepo %s since last push to %s\n')
710 _('no changes made to subrepo %s since last push to %s\n')
709 % (subrelpath(self), dsturl))
711 % (subrelpath(self), dsturl))
710 return None
712 return None
711 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
713 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
712 (subrelpath(self), dsturl))
714 (subrelpath(self), dsturl))
713 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
715 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
714 res = self._repo.push(other, force, newbranch=newbranch)
716 res = self._repo.push(other, force, newbranch=newbranch)
715
717
716 # the repo is now clean
718 # the repo is now clean
717 self._cachestorehash(dsturl)
719 self._cachestorehash(dsturl)
718 return res
720 return res
719
721
720 @annotatesubrepoerror
722 @annotatesubrepoerror
721 def outgoing(self, ui, dest, opts):
723 def outgoing(self, ui, dest, opts):
722 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
724 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
723
725
724 @annotatesubrepoerror
726 @annotatesubrepoerror
725 def incoming(self, ui, source, opts):
727 def incoming(self, ui, source, opts):
726 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
728 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
727
729
728 @annotatesubrepoerror
730 @annotatesubrepoerror
729 def files(self):
731 def files(self):
730 rev = self._state[1]
732 rev = self._state[1]
731 ctx = self._repo[rev]
733 ctx = self._repo[rev]
732 return ctx.manifest()
734 return ctx.manifest()
733
735
734 def filedata(self, name):
736 def filedata(self, name):
735 rev = self._state[1]
737 rev = self._state[1]
736 return self._repo[rev][name].data()
738 return self._repo[rev][name].data()
737
739
738 def fileflags(self, name):
740 def fileflags(self, name):
739 rev = self._state[1]
741 rev = self._state[1]
740 ctx = self._repo[rev]
742 ctx = self._repo[rev]
741 return ctx.flags(name)
743 return ctx.flags(name)
742
744
743 def walk(self, match):
745 def walk(self, match):
744 ctx = self._repo[None]
746 ctx = self._repo[None]
745 return ctx.walk(match)
747 return ctx.walk(match)
746
748
747 @annotatesubrepoerror
749 @annotatesubrepoerror
748 def forget(self, ui, match, prefix):
750 def forget(self, ui, match, prefix):
749 return cmdutil.forget(ui, self._repo, match,
751 return cmdutil.forget(ui, self._repo, match,
750 os.path.join(prefix, self._path), True)
752 os.path.join(prefix, self._path), True)
751
753
752 @annotatesubrepoerror
754 @annotatesubrepoerror
753 def revert(self, ui, substate, *pats, **opts):
755 def revert(self, ui, substate, *pats, **opts):
754 # reverting a subrepo is a 2-step process:
756 # reverting a subrepo is a 2-step process:
755 # 1. if no_backup is not set, revert all modified
757 # 1. if no_backup is not set, revert all modified
756 # files inside the subrepo
758 # files inside the subrepo
757 # 2. update the subrepo to the revision specified in
759 # 2. update the subrepo to the revision specified in
758 # the corresponding substate dictionary
760 # the corresponding substate dictionary
759 ui.status(_('reverting subrepo %s\n') % substate[0])
761 ui.status(_('reverting subrepo %s\n') % substate[0])
760 if not opts.get('no_backup'):
762 if not opts.get('no_backup'):
761 # Revert all files on the subrepo, creating backups
763 # Revert all files on the subrepo, creating backups
762 # Note that this will not recursively revert subrepos
764 # Note that this will not recursively revert subrepos
763 # We could do it if there was a set:subrepos() predicate
765 # We could do it if there was a set:subrepos() predicate
764 opts = opts.copy()
766 opts = opts.copy()
765 opts['date'] = None
767 opts['date'] = None
766 opts['rev'] = substate[1]
768 opts['rev'] = substate[1]
767
769
768 pats = []
770 pats = []
769 if not opts.get('all'):
771 if not opts.get('all'):
770 pats = ['set:modified()']
772 pats = ['set:modified()']
771 self.filerevert(ui, *pats, **opts)
773 self.filerevert(ui, *pats, **opts)
772
774
773 # Update the repo to the revision specified in the given substate
775 # Update the repo to the revision specified in the given substate
774 self.get(substate, overwrite=True)
776 self.get(substate, overwrite=True)
775
777
776 def filerevert(self, ui, *pats, **opts):
778 def filerevert(self, ui, *pats, **opts):
777 ctx = self._repo[opts['rev']]
779 ctx = self._repo[opts['rev']]
778 parents = self._repo.dirstate.parents()
780 parents = self._repo.dirstate.parents()
779 if opts.get('all'):
781 if opts.get('all'):
780 pats = ['set:modified()']
782 pats = ['set:modified()']
781 else:
783 else:
782 pats = []
784 pats = []
783 cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
785 cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
784
786
785 class svnsubrepo(abstractsubrepo):
787 class svnsubrepo(abstractsubrepo):
786 def __init__(self, ctx, path, state):
788 def __init__(self, ctx, path, state):
787 self._path = path
789 self._path = path
788 self._state = state
790 self._state = state
789 self._ctx = ctx
791 self._ctx = ctx
790 self._ui = ctx._repo.ui
792 self._ui = ctx._repo.ui
791 self._exe = util.findexe('svn')
793 self._exe = util.findexe('svn')
792 if not self._exe:
794 if not self._exe:
793 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
795 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
794 % self._path)
796 % self._path)
795
797
796 def _svncommand(self, commands, filename='', failok=False):
798 def _svncommand(self, commands, filename='', failok=False):
797 cmd = [self._exe]
799 cmd = [self._exe]
798 extrakw = {}
800 extrakw = {}
799 if not self._ui.interactive():
801 if not self._ui.interactive():
800 # Making stdin be a pipe should prevent svn from behaving
802 # Making stdin be a pipe should prevent svn from behaving
801 # interactively even if we can't pass --non-interactive.
803 # interactively even if we can't pass --non-interactive.
802 extrakw['stdin'] = subprocess.PIPE
804 extrakw['stdin'] = subprocess.PIPE
803 # Starting in svn 1.5 --non-interactive is a global flag
805 # Starting in svn 1.5 --non-interactive is a global flag
804 # instead of being per-command, but we need to support 1.4 so
806 # instead of being per-command, but we need to support 1.4 so
805 # we have to be intelligent about what commands take
807 # we have to be intelligent about what commands take
806 # --non-interactive.
808 # --non-interactive.
807 if commands[0] in ('update', 'checkout', 'commit'):
809 if commands[0] in ('update', 'checkout', 'commit'):
808 cmd.append('--non-interactive')
810 cmd.append('--non-interactive')
809 cmd.extend(commands)
811 cmd.extend(commands)
810 if filename is not None:
812 if filename is not None:
811 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
813 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
812 cmd.append(path)
814 cmd.append(path)
813 env = dict(os.environ)
815 env = dict(os.environ)
814 # Avoid localized output, preserve current locale for everything else.
816 # Avoid localized output, preserve current locale for everything else.
815 lc_all = env.get('LC_ALL')
817 lc_all = env.get('LC_ALL')
816 if lc_all:
818 if lc_all:
817 env['LANG'] = lc_all
819 env['LANG'] = lc_all
818 del env['LC_ALL']
820 del env['LC_ALL']
819 env['LC_MESSAGES'] = 'C'
821 env['LC_MESSAGES'] = 'C'
820 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
822 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
821 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
823 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
822 universal_newlines=True, env=env, **extrakw)
824 universal_newlines=True, env=env, **extrakw)
823 stdout, stderr = p.communicate()
825 stdout, stderr = p.communicate()
824 stderr = stderr.strip()
826 stderr = stderr.strip()
825 if not failok:
827 if not failok:
826 if p.returncode:
828 if p.returncode:
827 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
829 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
828 if stderr:
830 if stderr:
829 self._ui.warn(stderr + '\n')
831 self._ui.warn(stderr + '\n')
830 return stdout, stderr
832 return stdout, stderr
831
833
832 @propertycache
834 @propertycache
833 def _svnversion(self):
835 def _svnversion(self):
834 output, err = self._svncommand(['--version', '--quiet'], filename=None)
836 output, err = self._svncommand(['--version', '--quiet'], filename=None)
835 m = re.search(r'^(\d+)\.(\d+)', output)
837 m = re.search(r'^(\d+)\.(\d+)', output)
836 if not m:
838 if not m:
837 raise util.Abort(_('cannot retrieve svn tool version'))
839 raise util.Abort(_('cannot retrieve svn tool version'))
838 return (int(m.group(1)), int(m.group(2)))
840 return (int(m.group(1)), int(m.group(2)))
839
841
840 def _wcrevs(self):
842 def _wcrevs(self):
841 # Get the working directory revision as well as the last
843 # Get the working directory revision as well as the last
842 # commit revision so we can compare the subrepo state with
844 # commit revision so we can compare the subrepo state with
843 # both. We used to store the working directory one.
845 # both. We used to store the working directory one.
844 output, err = self._svncommand(['info', '--xml'])
846 output, err = self._svncommand(['info', '--xml'])
845 doc = xml.dom.minidom.parseString(output)
847 doc = xml.dom.minidom.parseString(output)
846 entries = doc.getElementsByTagName('entry')
848 entries = doc.getElementsByTagName('entry')
847 lastrev, rev = '0', '0'
849 lastrev, rev = '0', '0'
848 if entries:
850 if entries:
849 rev = str(entries[0].getAttribute('revision')) or '0'
851 rev = str(entries[0].getAttribute('revision')) or '0'
850 commits = entries[0].getElementsByTagName('commit')
852 commits = entries[0].getElementsByTagName('commit')
851 if commits:
853 if commits:
852 lastrev = str(commits[0].getAttribute('revision')) or '0'
854 lastrev = str(commits[0].getAttribute('revision')) or '0'
853 return (lastrev, rev)
855 return (lastrev, rev)
854
856
855 def _wcrev(self):
857 def _wcrev(self):
856 return self._wcrevs()[0]
858 return self._wcrevs()[0]
857
859
858 def _wcchanged(self):
860 def _wcchanged(self):
859 """Return (changes, extchanges, missing) where changes is True
861 """Return (changes, extchanges, missing) where changes is True
860 if the working directory was changed, extchanges is
862 if the working directory was changed, extchanges is
861 True if any of these changes concern an external entry and missing
863 True if any of these changes concern an external entry and missing
862 is True if any change is a missing entry.
864 is True if any change is a missing entry.
863 """
865 """
864 output, err = self._svncommand(['status', '--xml'])
866 output, err = self._svncommand(['status', '--xml'])
865 externals, changes, missing = [], [], []
867 externals, changes, missing = [], [], []
866 doc = xml.dom.minidom.parseString(output)
868 doc = xml.dom.minidom.parseString(output)
867 for e in doc.getElementsByTagName('entry'):
869 for e in doc.getElementsByTagName('entry'):
868 s = e.getElementsByTagName('wc-status')
870 s = e.getElementsByTagName('wc-status')
869 if not s:
871 if not s:
870 continue
872 continue
871 item = s[0].getAttribute('item')
873 item = s[0].getAttribute('item')
872 props = s[0].getAttribute('props')
874 props = s[0].getAttribute('props')
873 path = e.getAttribute('path')
875 path = e.getAttribute('path')
874 if item == 'external':
876 if item == 'external':
875 externals.append(path)
877 externals.append(path)
876 elif item == 'missing':
878 elif item == 'missing':
877 missing.append(path)
879 missing.append(path)
878 if (item not in ('', 'normal', 'unversioned', 'external')
880 if (item not in ('', 'normal', 'unversioned', 'external')
879 or props not in ('', 'none', 'normal')):
881 or props not in ('', 'none', 'normal')):
880 changes.append(path)
882 changes.append(path)
881 for path in changes:
883 for path in changes:
882 for ext in externals:
884 for ext in externals:
883 if path == ext or path.startswith(ext + os.sep):
885 if path == ext or path.startswith(ext + os.sep):
884 return True, True, bool(missing)
886 return True, True, bool(missing)
885 return bool(changes), False, bool(missing)
887 return bool(changes), False, bool(missing)
886
888
887 def dirty(self, ignoreupdate=False):
889 def dirty(self, ignoreupdate=False):
888 if not self._wcchanged()[0]:
890 if not self._wcchanged()[0]:
889 if self._state[1] in self._wcrevs() or ignoreupdate:
891 if self._state[1] in self._wcrevs() or ignoreupdate:
890 return False
892 return False
891 return True
893 return True
892
894
893 def basestate(self):
895 def basestate(self):
894 lastrev, rev = self._wcrevs()
896 lastrev, rev = self._wcrevs()
895 if lastrev != rev:
897 if lastrev != rev:
896 # Last committed rev is not the same as rev. We would
898 # Last committed rev is not the same as rev. We would
897 # like to take lastrev but we do not know if the subrepo
899 # like to take lastrev but we do not know if the subrepo
898 # URL exists at lastrev. Test it and fall back to rev if it
900 # URL exists at lastrev. Test it and fall back to rev if it
899 # is not there.
901 # is not there.
900 try:
902 try:
901 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
903 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
902 return lastrev
904 return lastrev
903 except error.Abort:
905 except error.Abort:
904 pass
906 pass
905 return rev
907 return rev
906
908
907 @annotatesubrepoerror
909 @annotatesubrepoerror
908 def commit(self, text, user, date):
910 def commit(self, text, user, date):
909 # user and date are out of our hands since svn is centralized
911 # user and date are out of our hands since svn is centralized
910 changed, extchanged, missing = self._wcchanged()
912 changed, extchanged, missing = self._wcchanged()
911 if not changed:
913 if not changed:
912 return self.basestate()
914 return self.basestate()
913 if extchanged:
915 if extchanged:
914 # Do not try to commit externals
916 # Do not try to commit externals
915 raise util.Abort(_('cannot commit svn externals'))
917 raise util.Abort(_('cannot commit svn externals'))
916 if missing:
918 if missing:
917 # svn can commit with missing entries but aborting like hg
919 # svn can commit with missing entries but aborting like hg
918 # seems a better approach.
920 # seems a better approach.
919 raise util.Abort(_('cannot commit missing svn entries'))
921 raise util.Abort(_('cannot commit missing svn entries'))
920 commitinfo, err = self._svncommand(['commit', '-m', text])
922 commitinfo, err = self._svncommand(['commit', '-m', text])
921 self._ui.status(commitinfo)
923 self._ui.status(commitinfo)
922 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
924 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
923 if not newrev:
925 if not newrev:
924 if not commitinfo.strip():
926 if not commitinfo.strip():
925 # Sometimes, our definition of "changed" differs from
927 # Sometimes, our definition of "changed" differs from
926 # svn's. For instance, svn ignores missing files
928 # svn's. For instance, svn ignores missing files
927 # when committing. If there are only missing files, no
929 # when committing. If there are only missing files, no
928 # commit is made, no output and no error code.
930 # commit is made, no output and no error code.
929 raise util.Abort(_('failed to commit svn changes'))
931 raise util.Abort(_('failed to commit svn changes'))
930 raise util.Abort(commitinfo.splitlines()[-1])
932 raise util.Abort(commitinfo.splitlines()[-1])
931 newrev = newrev.groups()[0]
933 newrev = newrev.groups()[0]
932 self._ui.status(self._svncommand(['update', '-r', newrev])[0])
934 self._ui.status(self._svncommand(['update', '-r', newrev])[0])
933 return newrev
935 return newrev
934
936
935 @annotatesubrepoerror
937 @annotatesubrepoerror
936 def remove(self):
938 def remove(self):
937 if self.dirty():
939 if self.dirty():
938 self._ui.warn(_('not removing repo %s because '
940 self._ui.warn(_('not removing repo %s because '
939 'it has changes.\n') % self._path)
941 'it has changes.\n') % self._path)
940 return
942 return
941 self._ui.note(_('removing subrepo %s\n') % self._path)
943 self._ui.note(_('removing subrepo %s\n') % self._path)
942
944
943 def onerror(function, path, excinfo):
945 def onerror(function, path, excinfo):
944 if function is not os.remove:
946 if function is not os.remove:
945 raise
947 raise
946 # read-only files cannot be unlinked under Windows
948 # read-only files cannot be unlinked under Windows
947 s = os.stat(path)
949 s = os.stat(path)
948 if (s.st_mode & stat.S_IWRITE) != 0:
950 if (s.st_mode & stat.S_IWRITE) != 0:
949 raise
951 raise
950 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
952 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
951 os.remove(path)
953 os.remove(path)
952
954
953 path = self._ctx._repo.wjoin(self._path)
955 path = self._ctx._repo.wjoin(self._path)
954 shutil.rmtree(path, onerror=onerror)
956 shutil.rmtree(path, onerror=onerror)
955 try:
957 try:
956 os.removedirs(os.path.dirname(path))
958 os.removedirs(os.path.dirname(path))
957 except OSError:
959 except OSError:
958 pass
960 pass
959
961
960 @annotatesubrepoerror
962 @annotatesubrepoerror
961 def get(self, state, overwrite=False):
963 def get(self, state, overwrite=False):
962 if overwrite:
964 if overwrite:
963 self._svncommand(['revert', '--recursive'])
965 self._svncommand(['revert', '--recursive'])
964 args = ['checkout']
966 args = ['checkout']
965 if self._svnversion >= (1, 5):
967 if self._svnversion >= (1, 5):
966 args.append('--force')
968 args.append('--force')
967 # The revision must be specified at the end of the URL to properly
969 # The revision must be specified at the end of the URL to properly
968 # update to a directory which has since been deleted and recreated.
970 # update to a directory which has since been deleted and recreated.
969 args.append('%s@%s' % (state[0], state[1]))
971 args.append('%s@%s' % (state[0], state[1]))
970 status, err = self._svncommand(args, failok=True)
972 status, err = self._svncommand(args, failok=True)
971 if not re.search('Checked out revision [0-9]+.', status):
973 if not re.search('Checked out revision [0-9]+.', status):
972 if ('is already a working copy for a different URL' in err
974 if ('is already a working copy for a different URL' in err
973 and (self._wcchanged()[:2] == (False, False))):
975 and (self._wcchanged()[:2] == (False, False))):
974 # obstructed but clean working copy, so just blow it away.
976 # obstructed but clean working copy, so just blow it away.
975 self.remove()
977 self.remove()
976 self.get(state, overwrite=False)
978 self.get(state, overwrite=False)
977 return
979 return
978 raise util.Abort((status or err).splitlines()[-1])
980 raise util.Abort((status or err).splitlines()[-1])
979 self._ui.status(status)
981 self._ui.status(status)
980
982
981 @annotatesubrepoerror
983 @annotatesubrepoerror
982 def merge(self, state):
984 def merge(self, state):
983 old = self._state[1]
985 old = self._state[1]
984 new = state[1]
986 new = state[1]
985 wcrev = self._wcrev()
987 wcrev = self._wcrev()
986 if new != wcrev:
988 if new != wcrev:
987 dirty = old == wcrev or self._wcchanged()[0]
989 dirty = old == wcrev or self._wcchanged()[0]
988 if _updateprompt(self._ui, self, dirty, wcrev, new):
990 if _updateprompt(self._ui, self, dirty, wcrev, new):
989 self.get(state, False)
991 self.get(state, False)
990
992
991 def push(self, opts):
993 def push(self, opts):
992 # push is a no-op for SVN
994 # push is a no-op for SVN
993 return True
995 return True
994
996
995 @annotatesubrepoerror
997 @annotatesubrepoerror
996 def files(self):
998 def files(self):
997 output = self._svncommand(['list', '--recursive', '--xml'])[0]
999 output = self._svncommand(['list', '--recursive', '--xml'])[0]
998 doc = xml.dom.minidom.parseString(output)
1000 doc = xml.dom.minidom.parseString(output)
999 paths = []
1001 paths = []
1000 for e in doc.getElementsByTagName('entry'):
1002 for e in doc.getElementsByTagName('entry'):
1001 kind = str(e.getAttribute('kind'))
1003 kind = str(e.getAttribute('kind'))
1002 if kind != 'file':
1004 if kind != 'file':
1003 continue
1005 continue
1004 name = ''.join(c.data for c
1006 name = ''.join(c.data for c
1005 in e.getElementsByTagName('name')[0].childNodes
1007 in e.getElementsByTagName('name')[0].childNodes
1006 if c.nodeType == c.TEXT_NODE)
1008 if c.nodeType == c.TEXT_NODE)
1007 paths.append(name.encode('utf-8'))
1009 paths.append(name.encode('utf-8'))
1008 return paths
1010 return paths
1009
1011
1010 def filedata(self, name):
1012 def filedata(self, name):
1011 return self._svncommand(['cat'], name)[0]
1013 return self._svncommand(['cat'], name)[0]
1012
1014
1013
1015
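For reference, a small standalone sketch of the --xml parsing pattern that _wcrevs and _wcchanged rely on above. The sample document is made up; only the element and attribute names follow what svn actually emits, and the snippet is illustrative rather than part of this change:

    import xml.dom.minidom

    sample = '''<status><target path=".">
      <entry path="foo.c"><wc-status item="modified" props="none"/></entry>
      <entry path="vendor"><wc-status item="external" props="none"/></entry>
    </target></status>'''

    doc = xml.dom.minidom.parseString(sample)
    changed, externals = [], []
    for e in doc.getElementsByTagName('entry'):
        s = e.getElementsByTagName('wc-status')
        if not s:
            continue
        item = s[0].getAttribute('item')
        path = e.getAttribute('path')
        if item == 'external':
            externals.append(path)
        elif item not in ('', 'normal', 'unversioned'):
            changed.append(path)
    assert changed == ['foo.c'] and externals == ['vendor']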
1014 class gitsubrepo(abstractsubrepo):
1016 class gitsubrepo(abstractsubrepo):
1015 def __init__(self, ctx, path, state):
1017 def __init__(self, ctx, path, state):
1016 self._state = state
1018 self._state = state
1017 self._ctx = ctx
1019 self._ctx = ctx
1018 self._path = path
1020 self._path = path
1019 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1021 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1020 self._abspath = ctx._repo.wjoin(path)
1022 self._abspath = ctx._repo.wjoin(path)
1021 self._subparent = ctx._repo
1023 self._subparent = ctx._repo
1022 self._ui = ctx._repo.ui
1024 self._ui = ctx._repo.ui
1023 self._ensuregit()
1025 self._ensuregit()
1024
1026
1025 def _ensuregit(self):
1027 def _ensuregit(self):
1026 try:
1028 try:
1027 self._gitexecutable = 'git'
1029 self._gitexecutable = 'git'
1028 out, err = self._gitnodir(['--version'])
1030 out, err = self._gitnodir(['--version'])
1029 except OSError, e:
1031 except OSError, e:
1030 if e.errno != 2 or os.name != 'nt':
1032 if e.errno != 2 or os.name != 'nt':
1031 raise
1033 raise
1032 self._gitexecutable = 'git.cmd'
1034 self._gitexecutable = 'git.cmd'
1033 out, err = self._gitnodir(['--version'])
1035 out, err = self._gitnodir(['--version'])
1034 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1036 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1035 if not m:
1037 if not m:
1036 self._ui.warn(_('cannot retrieve git version'))
1038 self._ui.warn(_('cannot retrieve git version'))
1037 return
1039 return
1038 version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1040 version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1039 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1041 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1040 # despite the docstring comment. For now, error on 1.4.0, warn on
1042 # despite the docstring comment. For now, error on 1.4.0, warn on
1041 # 1.5.0 but attempt to continue.
1043 # 1.5.0 but attempt to continue.
1042 if version < (1, 5, 0):
1044 if version < (1, 5, 0):
1043 raise util.Abort(_('git subrepo requires git 1.6.0 or later'))
1045 raise util.Abort(_('git subrepo requires git 1.6.0 or later'))
1044 elif version < (1, 6, 0):
1046 elif version < (1, 6, 0):
1045 self._ui.warn(_('git subrepo requires git 1.6.0 or later'))
1047 self._ui.warn(_('git subrepo requires git 1.6.0 or later'))
1046
1048
1047 def _gitcommand(self, commands, env=None, stream=False):
1049 def _gitcommand(self, commands, env=None, stream=False):
1048 return self._gitdir(commands, env=env, stream=stream)[0]
1050 return self._gitdir(commands, env=env, stream=stream)[0]
1049
1051
1050 def _gitdir(self, commands, env=None, stream=False):
1052 def _gitdir(self, commands, env=None, stream=False):
1051 return self._gitnodir(commands, env=env, stream=stream,
1053 return self._gitnodir(commands, env=env, stream=stream,
1052 cwd=self._abspath)
1054 cwd=self._abspath)
1053
1055
1054 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1056 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1055 """Calls the git command
1057 """Calls the git command
1056
1058
1057 The method tries to call the git command. Versions prior to 1.6.0
1059 The method tries to call the git command. Versions prior to 1.6.0
1058 are not supported and will very probably fail.
1060 are not supported and will very probably fail.
1059 """
1061 """
1060 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1062 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1061 # unless ui.quiet is set, print git's stderr,
1063 # unless ui.quiet is set, print git's stderr,
1062 # which is mostly progress and useful info
1064 # which is mostly progress and useful info
1063 errpipe = None
1065 errpipe = None
1064 if self._ui.quiet:
1066 if self._ui.quiet:
1065 errpipe = open(os.devnull, 'w')
1067 errpipe = open(os.devnull, 'w')
1066 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1068 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1067 cwd=cwd, env=env, close_fds=util.closefds,
1069 cwd=cwd, env=env, close_fds=util.closefds,
1068 stdout=subprocess.PIPE, stderr=errpipe)
1070 stdout=subprocess.PIPE, stderr=errpipe)
1069 if stream:
1071 if stream:
1070 return p.stdout, None
1072 return p.stdout, None
1071
1073
1072 retdata = p.stdout.read().strip()
1074 retdata = p.stdout.read().strip()
1073 # wait for the child to exit to avoid race condition.
1075 # wait for the child to exit to avoid race condition.
1074 p.wait()
1076 p.wait()
1075
1077
1076 if p.returncode != 0 and p.returncode != 1:
1078 if p.returncode != 0 and p.returncode != 1:
1077 # there are certain error codes that are ok
1079 # there are certain error codes that are ok
1078 command = commands[0]
1080 command = commands[0]
1079 if command in ('cat-file', 'symbolic-ref'):
1081 if command in ('cat-file', 'symbolic-ref'):
1080 return retdata, p.returncode
1082 return retdata, p.returncode
1081 # for all others, abort
1083 # for all others, abort
1082 raise util.Abort('git %s error %d in %s' %
1084 raise util.Abort('git %s error %d in %s' %
1083 (command, p.returncode, self._relpath))
1085 (command, p.returncode, self._relpath))
1084
1086
1085 return retdata, p.returncode
1087 return retdata, p.returncode
1086
1088
1087 def _gitmissing(self):
1089 def _gitmissing(self):
1088 return not os.path.exists(os.path.join(self._abspath, '.git'))
1090 return not os.path.exists(os.path.join(self._abspath, '.git'))
1089
1091
1090 def _gitstate(self):
1092 def _gitstate(self):
1091 return self._gitcommand(['rev-parse', 'HEAD'])
1093 return self._gitcommand(['rev-parse', 'HEAD'])
1092
1094
1093 def _gitcurrentbranch(self):
1095 def _gitcurrentbranch(self):
1094 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1096 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1095 if err:
1097 if err:
1096 current = None
1098 current = None
1097 return current
1099 return current
1098
1100
1099 def _gitremote(self, remote):
1101 def _gitremote(self, remote):
1100 out = self._gitcommand(['remote', 'show', '-n', remote])
1102 out = self._gitcommand(['remote', 'show', '-n', remote])
1101 line = out.split('\n')[1]
1103 line = out.split('\n')[1]
1102 i = line.index('URL: ') + len('URL: ')
1104 i = line.index('URL: ') + len('URL: ')
1103 return line[i:]
1105 return line[i:]
1104
1106
1105 def _githavelocally(self, revision):
1107 def _githavelocally(self, revision):
1106 out, code = self._gitdir(['cat-file', '-e', revision])
1108 out, code = self._gitdir(['cat-file', '-e', revision])
1107 return code == 0
1109 return code == 0
1108
1110
1109 def _gitisancestor(self, r1, r2):
1111 def _gitisancestor(self, r1, r2):
1110 base = self._gitcommand(['merge-base', r1, r2])
1112 base = self._gitcommand(['merge-base', r1, r2])
1111 return base == r1
1113 return base == r1
1112
1114
1113 def _gitisbare(self):
1115 def _gitisbare(self):
1114 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1116 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1115
1117
1116 def _gitupdatestat(self):
1118 def _gitupdatestat(self):
1117 """This must be run before git diff-index.
1119 """This must be run before git diff-index.
1118 diff-index only looks at changes to file stat;
1120 diff-index only looks at changes to file stat;
1119 this command looks at file contents and updates the stat."""
1121 this command looks at file contents and updates the stat."""
1120 self._gitcommand(['update-index', '-q', '--refresh'])
1122 self._gitcommand(['update-index', '-q', '--refresh'])
1121
1123
1122 def _gitbranchmap(self):
1124 def _gitbranchmap(self):
1123 '''returns 2 things:
1125 '''returns 2 things:
1124 a map from git branch to revision
1126 a map from git branch to revision
1125 a map from revision to branches'''
1127 a map from revision to branches'''
1126 branch2rev = {}
1128 branch2rev = {}
1127 rev2branch = {}
1129 rev2branch = {}
1128
1130
1129 out = self._gitcommand(['for-each-ref', '--format',
1131 out = self._gitcommand(['for-each-ref', '--format',
1130 '%(objectname) %(refname)'])
1132 '%(objectname) %(refname)'])
1131 for line in out.split('\n'):
1133 for line in out.split('\n'):
1132 revision, ref = line.split(' ')
1134 revision, ref = line.split(' ')
1133 if (not ref.startswith('refs/heads/') and
1135 if (not ref.startswith('refs/heads/') and
1134 not ref.startswith('refs/remotes/')):
1136 not ref.startswith('refs/remotes/')):
1135 continue
1137 continue
1136 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1138 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1137 continue # ignore remote/HEAD redirects
1139 continue # ignore remote/HEAD redirects
1138 branch2rev[ref] = revision
1140 branch2rev[ref] = revision
1139 rev2branch.setdefault(revision, []).append(ref)
1141 rev2branch.setdefault(revision, []).append(ref)
1140 return branch2rev, rev2branch
1142 return branch2rev, rev2branch
1141
1143
1142 def _gittracking(self, branches):
1144 def _gittracking(self, branches):
1143 'return map of remote branch to local tracking branch'
1145 'return map of remote branch to local tracking branch'
1144 # assumes no more than one local tracking branch for each remote
1146 # assumes no more than one local tracking branch for each remote
1145 tracking = {}
1147 tracking = {}
1146 for b in branches:
1148 for b in branches:
1147 if b.startswith('refs/remotes/'):
1149 if b.startswith('refs/remotes/'):
1148 continue
1150 continue
1149 bname = b.split('/', 2)[2]
1151 bname = b.split('/', 2)[2]
1150 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1152 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1151 if remote:
1153 if remote:
1152 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1154 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1153 tracking['refs/remotes/%s/%s' %
1155 tracking['refs/remotes/%s/%s' %
1154 (remote, ref.split('/', 2)[2])] = b
1156 (remote, ref.split('/', 2)[2])] = b
1155 return tracking
1157 return tracking
1156
1158
1157 def _abssource(self, source):
1159 def _abssource(self, source):
1158 if '://' not in source:
1160 if '://' not in source:
1159 # recognize the scp syntax as an absolute source
1161 # recognize the scp syntax as an absolute source
1160 colon = source.find(':')
1162 colon = source.find(':')
1161 if colon != -1 and '/' not in source[:colon]:
1163 if colon != -1 and '/' not in source[:colon]:
1162 return source
1164 return source
1163 self._subsource = source
1165 self._subsource = source
1164 return _abssource(self)
1166 return _abssource(self)
1165
1167
1166 def _fetch(self, source, revision):
1168 def _fetch(self, source, revision):
1167 if self._gitmissing():
1169 if self._gitmissing():
1168 source = self._abssource(source)
1170 source = self._abssource(source)
1169 self._ui.status(_('cloning subrepo %s from %s\n') %
1171 self._ui.status(_('cloning subrepo %s from %s\n') %
1170 (self._relpath, source))
1172 (self._relpath, source))
1171 self._gitnodir(['clone', source, self._abspath])
1173 self._gitnodir(['clone', source, self._abspath])
1172 if self._githavelocally(revision):
1174 if self._githavelocally(revision):
1173 return
1175 return
1174 self._ui.status(_('pulling subrepo %s from %s\n') %
1176 self._ui.status(_('pulling subrepo %s from %s\n') %
1175 (self._relpath, self._gitremote('origin')))
1177 (self._relpath, self._gitremote('origin')))
1176 # try only origin: the originally cloned repo
1178 # try only origin: the originally cloned repo
1177 self._gitcommand(['fetch'])
1179 self._gitcommand(['fetch'])
1178 if not self._githavelocally(revision):
1180 if not self._githavelocally(revision):
1179 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1181 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1180 (revision, self._relpath))
1182 (revision, self._relpath))
1181
1183
1182 @annotatesubrepoerror
1184 @annotatesubrepoerror
1183 def dirty(self, ignoreupdate=False):
1185 def dirty(self, ignoreupdate=False):
1184 if self._gitmissing():
1186 if self._gitmissing():
1185 return self._state[1] != ''
1187 return self._state[1] != ''
1186 if self._gitisbare():
1188 if self._gitisbare():
1187 return True
1189 return True
1188 if not ignoreupdate and self._state[1] != self._gitstate():
1190 if not ignoreupdate and self._state[1] != self._gitstate():
1189 # different version checked out
1191 # different version checked out
1190 return True
1192 return True
1191 # check for staged changes or modified files; ignore untracked files
1193 # check for staged changes or modified files; ignore untracked files
1192 self._gitupdatestat()
1194 self._gitupdatestat()
1193 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1195 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1194 return code == 1
1196 return code == 1
1195
1197
1196 def basestate(self):
1198 def basestate(self):
1197 return self._gitstate()
1199 return self._gitstate()
1198
1200
1199 @annotatesubrepoerror
1201 @annotatesubrepoerror
1200 def get(self, state, overwrite=False):
1202 def get(self, state, overwrite=False):
1201 source, revision, kind = state
1203 source, revision, kind = state
1202 if not revision:
1204 if not revision:
1203 self.remove()
1205 self.remove()
1204 return
1206 return
1205 self._fetch(source, revision)
1207 self._fetch(source, revision)
1206 # if the repo was set to be bare, unbare it
1208 # if the repo was set to be bare, unbare it
1207 if self._gitisbare():
1209 if self._gitisbare():
1208 self._gitcommand(['config', 'core.bare', 'false'])
1210 self._gitcommand(['config', 'core.bare', 'false'])
1209 if self._gitstate() == revision:
1211 if self._gitstate() == revision:
1210 self._gitcommand(['reset', '--hard', 'HEAD'])
1212 self._gitcommand(['reset', '--hard', 'HEAD'])
1211 return
1213 return
1212 elif self._gitstate() == revision:
1214 elif self._gitstate() == revision:
1213 if overwrite:
1215 if overwrite:
1214 # first reset the index to unmark new files for commit, because
1216 # first reset the index to unmark new files for commit, because
1215 # reset --hard will otherwise throw away files added for commit,
1217 # reset --hard will otherwise throw away files added for commit,
1216 # not just unmark them.
1218 # not just unmark them.
1217 self._gitcommand(['reset', 'HEAD'])
1219 self._gitcommand(['reset', 'HEAD'])
1218 self._gitcommand(['reset', '--hard', 'HEAD'])
1220 self._gitcommand(['reset', '--hard', 'HEAD'])
1219 return
1221 return
1220 branch2rev, rev2branch = self._gitbranchmap()
1222 branch2rev, rev2branch = self._gitbranchmap()
1221
1223
1222 def checkout(args):
1224 def checkout(args):
1223 cmd = ['checkout']
1225 cmd = ['checkout']
1224 if overwrite:
1226 if overwrite:
1225 # first reset the index to unmark new files for commit, because
1227 # first reset the index to unmark new files for commit, because
1226 # the -f option will otherwise throw away files added for
1228 # the -f option will otherwise throw away files added for
1227 # commit, not just unmark them.
1229 # commit, not just unmark them.
1228 self._gitcommand(['reset', 'HEAD'])
1230 self._gitcommand(['reset', 'HEAD'])
1229 cmd.append('-f')
1231 cmd.append('-f')
1230 self._gitcommand(cmd + args)
1232 self._gitcommand(cmd + args)
1231
1233
1232 def rawcheckout():
1234 def rawcheckout():
1233 # no branch to checkout, check it out with no branch
1235 # no branch to checkout, check it out with no branch
1234 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1236 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1235 self._relpath)
1237 self._relpath)
1236 self._ui.warn(_('check out a git branch if you intend '
1238 self._ui.warn(_('check out a git branch if you intend '
1237 'to make changes\n'))
1239 'to make changes\n'))
1238 checkout(['-q', revision])
1240 checkout(['-q', revision])
1239
1241
1240 if revision not in rev2branch:
1242 if revision not in rev2branch:
1241 rawcheckout()
1243 rawcheckout()
1242 return
1244 return
1243 branches = rev2branch[revision]
1245 branches = rev2branch[revision]
1244 firstlocalbranch = None
1246 firstlocalbranch = None
1245 for b in branches:
1247 for b in branches:
1246 if b == 'refs/heads/master':
1248 if b == 'refs/heads/master':
1247 # master trumps all other branches
1249 # master trumps all other branches
1248 checkout(['refs/heads/master'])
1250 checkout(['refs/heads/master'])
1249 return
1251 return
1250 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1252 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1251 firstlocalbranch = b
1253 firstlocalbranch = b
1252 if firstlocalbranch:
1254 if firstlocalbranch:
1253 checkout([firstlocalbranch])
1255 checkout([firstlocalbranch])
1254 return
1256 return
1255
1257
1256 tracking = self._gittracking(branch2rev.keys())
1258 tracking = self._gittracking(branch2rev.keys())
1257 # choose a remote branch already tracked if possible
1259 # choose a remote branch already tracked if possible
1258 remote = branches[0]
1260 remote = branches[0]
1259 if remote not in tracking:
1261 if remote not in tracking:
1260 for b in branches:
1262 for b in branches:
1261 if b in tracking:
1263 if b in tracking:
1262 remote = b
1264 remote = b
1263 break
1265 break
1264
1266
1265 if remote not in tracking:
1267 if remote not in tracking:
1266 # create a new local tracking branch
1268 # create a new local tracking branch
1267 local = remote.split('/', 2)[2]
1269 local = remote.split('/', 2)[2]
1268 checkout(['-b', local, remote])
1270 checkout(['-b', local, remote])
1269 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1271 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1270 # When updating to a tracked remote branch,
1272 # When updating to a tracked remote branch,
1271 # if the local tracking branch is downstream of it,
1273 # if the local tracking branch is downstream of it,
1272 # a normal `git pull` would have performed a "fast-forward merge"
1274 # a normal `git pull` would have performed a "fast-forward merge"
1273 # which is equivalent to updating the local branch to the remote.
1275 # which is equivalent to updating the local branch to the remote.
1274 # Since we are only looking at branching at update, we need to
1276 # Since we are only looking at branching at update, we need to
1275 # detect this situation and perform this action lazily.
1277 # detect this situation and perform this action lazily.
1276 if tracking[remote] != self._gitcurrentbranch():
1278 if tracking[remote] != self._gitcurrentbranch():
1277 checkout([tracking[remote]])
1279 checkout([tracking[remote]])
1278 self._gitcommand(['merge', '--ff', remote])
1280 self._gitcommand(['merge', '--ff', remote])
1279 else:
1281 else:
1280 # a real merge would be required, just checkout the revision
1282 # a real merge would be required, just checkout the revision
1281 rawcheckout()
1283 rawcheckout()
1282
1284
1283 @annotatesubrepoerror
1285 @annotatesubrepoerror
1284 def commit(self, text, user, date):
1286 def commit(self, text, user, date):
1285 if self._gitmissing():
1287 if self._gitmissing():
1286 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1288 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1287 cmd = ['commit', '-a', '-m', text]
1289 cmd = ['commit', '-a', '-m', text]
1288 env = os.environ.copy()
1290 env = os.environ.copy()
1289 if user:
1291 if user:
1290 cmd += ['--author', user]
1292 cmd += ['--author', user]
1291 if date:
1293 if date:
1292 # git's date parser silently ignores when seconds < 1e9
1294 # git's date parser silently ignores when seconds < 1e9
1293 # convert to ISO8601
1295 # convert to ISO8601
1294 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1296 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1295 '%Y-%m-%dT%H:%M:%S %1%2')
1297 '%Y-%m-%dT%H:%M:%S %1%2')
1296 self._gitcommand(cmd, env=env)
1298 self._gitcommand(cmd, env=env)
1297 # make sure commit works otherwise HEAD might not exist under certain
1299 # make sure commit works otherwise HEAD might not exist under certain
1298 # circumstances
1300 # circumstances
1299 return self._gitstate()
1301 return self._gitstate()
1300
1302
1301 @annotatesubrepoerror
1303 @annotatesubrepoerror
1302 def merge(self, state):
1304 def merge(self, state):
1303 source, revision, kind = state
1305 source, revision, kind = state
1304 self._fetch(source, revision)
1306 self._fetch(source, revision)
1305 base = self._gitcommand(['merge-base', revision, self._state[1]])
1307 base = self._gitcommand(['merge-base', revision, self._state[1]])
1306 self._gitupdatestat()
1308 self._gitupdatestat()
1307 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1309 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1308
1310
1309 def mergefunc():
1311 def mergefunc():
1310 if base == revision:
1312 if base == revision:
1311 self.get(state) # fast forward merge
1313 self.get(state) # fast forward merge
1312 elif base != self._state[1]:
1314 elif base != self._state[1]:
1313 self._gitcommand(['merge', '--no-commit', revision])
1315 self._gitcommand(['merge', '--no-commit', revision])
1314
1316
1315 if self.dirty():
1317 if self.dirty():
1316 if self._gitstate() != revision:
1318 if self._gitstate() != revision:
1317 dirty = self._gitstate() == self._state[1] or code != 0
1319 dirty = self._gitstate() == self._state[1] or code != 0
1318 if _updateprompt(self._ui, self, dirty,
1320 if _updateprompt(self._ui, self, dirty,
1319 self._state[1][:7], revision[:7]):
1321 self._state[1][:7], revision[:7]):
1320 mergefunc()
1322 mergefunc()
1321 else:
1323 else:
1322 mergefunc()
1324 mergefunc()
1323
1325
1324 @annotatesubrepoerror
1326 @annotatesubrepoerror
1325 def push(self, opts):
1327 def push(self, opts):
1326 force = opts.get('force')
1328 force = opts.get('force')
1327
1329
1328 if not self._state[1]:
1330 if not self._state[1]:
1329 return True
1331 return True
1330 if self._gitmissing():
1332 if self._gitmissing():
1331 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1333 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1332 # if a branch in origin contains the revision, nothing to do
1334 # if a branch in origin contains the revision, nothing to do
1333 branch2rev, rev2branch = self._gitbranchmap()
1335 branch2rev, rev2branch = self._gitbranchmap()
1334 if self._state[1] in rev2branch:
1336 if self._state[1] in rev2branch:
1335 for b in rev2branch[self._state[1]]:
1337 for b in rev2branch[self._state[1]]:
1336 if b.startswith('refs/remotes/origin/'):
1338 if b.startswith('refs/remotes/origin/'):
1337 return True
1339 return True
1338 for b, revision in branch2rev.iteritems():
1340 for b, revision in branch2rev.iteritems():
1339 if b.startswith('refs/remotes/origin/'):
1341 if b.startswith('refs/remotes/origin/'):
1340 if self._gitisancestor(self._state[1], revision):
1342 if self._gitisancestor(self._state[1], revision):
1341 return True
1343 return True
1342 # otherwise, try to push the currently checked out branch
1344 # otherwise, try to push the currently checked out branch
1343 cmd = ['push']
1345 cmd = ['push']
1344 if force:
1346 if force:
1345 cmd.append('--force')
1347 cmd.append('--force')
1346
1348
1347 current = self._gitcurrentbranch()
1349 current = self._gitcurrentbranch()
1348 if current:
1350 if current:
1349 # determine if the current branch is even useful
1351 # determine if the current branch is even useful
1350 if not self._gitisancestor(self._state[1], current):
1352 if not self._gitisancestor(self._state[1], current):
1351 self._ui.warn(_('unrelated git branch checked out '
1353 self._ui.warn(_('unrelated git branch checked out '
1352 'in subrepo %s\n') % self._relpath)
1354 'in subrepo %s\n') % self._relpath)
1353 return False
1355 return False
1354 self._ui.status(_('pushing branch %s of subrepo %s\n') %
1356 self._ui.status(_('pushing branch %s of subrepo %s\n') %
1355 (current.split('/', 2)[2], self._relpath))
1357 (current.split('/', 2)[2], self._relpath))
1356 self._gitcommand(cmd + ['origin', current])
1358 self._gitcommand(cmd + ['origin', current])
1357 return True
1359 return True
1358 else:
1360 else:
1359 self._ui.warn(_('no branch checked out in subrepo %s\n'
1361 self._ui.warn(_('no branch checked out in subrepo %s\n'
1360 'cannot push revision %s\n') %
1362 'cannot push revision %s\n') %
1361 (self._relpath, self._state[1]))
1363 (self._relpath, self._state[1]))
1362 return False
1364 return False
1363
1365
1364 @annotatesubrepoerror
1366 @annotatesubrepoerror
1365 def remove(self):
1367 def remove(self):
1366 if self._gitmissing():
1368 if self._gitmissing():
1367 return
1369 return
1368 if self.dirty():
1370 if self.dirty():
1369 self._ui.warn(_('not removing repo %s because '
1371 self._ui.warn(_('not removing repo %s because '
1370 'it has changes.\n') % self._relpath)
1372 'it has changes.\n') % self._relpath)
1371 return
1373 return
1372 # we can't fully delete the repository as it may contain
1374 # we can't fully delete the repository as it may contain
1373 # local-only history
1375 # local-only history
1374 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1376 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1375 self._gitcommand(['config', 'core.bare', 'true'])
1377 self._gitcommand(['config', 'core.bare', 'true'])
1376 for f in os.listdir(self._abspath):
1378 for f in os.listdir(self._abspath):
1377 if f == '.git':
1379 if f == '.git':
1378 continue
1380 continue
1379 path = os.path.join(self._abspath, f)
1381 path = os.path.join(self._abspath, f)
1380 if os.path.isdir(path) and not os.path.islink(path):
1382 if os.path.isdir(path) and not os.path.islink(path):
1381 shutil.rmtree(path)
1383 shutil.rmtree(path)
1382 else:
1384 else:
1383 os.remove(path)
1385 os.remove(path)
1384
1386
1385 def archive(self, ui, archiver, prefix, match=None):
1387 def archive(self, ui, archiver, prefix, match=None):
1388 total = 0
1386 source, revision = self._state
1389 source, revision = self._state
1387 if not revision:
1390 if not revision:
1388 return
1391 return total
1389 self._fetch(source, revision)
1392 self._fetch(source, revision)
1390
1393
1391 # Parse git's native archive command.
1391 # Stream git's native archive command and parse its tar output.
1394 # Stream git's native archive command and parse its tar output.
1395 # This should be much faster than manually traversing the trees
1393 # and objects with many subprocess calls.
1396 # and objects with many subprocess calls.
1394 tarstream = self._gitcommand(['archive', revision], stream=True)
1397 tarstream = self._gitcommand(['archive', revision], stream=True)
1395 tar = tarfile.open(fileobj=tarstream, mode='r|')
1398 tar = tarfile.open(fileobj=tarstream, mode='r|')
1396 relpath = subrelpath(self)
1399 relpath = subrelpath(self)
1397 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1400 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1398 for i, info in enumerate(tar):
1401 for i, info in enumerate(tar):
1399 if info.isdir():
1402 if info.isdir():
1400 continue
1403 continue
1401 if match and not match(info.name):
1404 if match and not match(info.name):
1402 continue
1405 continue
1403 if info.issym():
1406 if info.issym():
1404 data = info.linkname
1407 data = info.linkname
1405 else:
1408 else:
1406 data = tar.extractfile(info).read()
1409 data = tar.extractfile(info).read()
1407 archiver.addfile(os.path.join(prefix, self._path, info.name),
1410 archiver.addfile(os.path.join(prefix, self._path, info.name),
1408 info.mode, info.issym(), data)
1411 info.mode, info.issym(), data)
1412 total += 1
1409 ui.progress(_('archiving (%s)') % relpath, i + 1,
1413 ui.progress(_('archiving (%s)') % relpath, i + 1,
1410 unit=_('files'))
1414 unit=_('files'))
1411 ui.progress(_('archiving (%s)') % relpath, None)
1415 ui.progress(_('archiving (%s)') % relpath, None)
1416 return total
1412
1417
1413
1418
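A note on the tar handling in archive() above: mode='r|' opens the tar as a forward-only stream, which is what allows reading directly from git's stdout pipe without a temporary file. A minimal standalone sketch of the same pattern, assuming it runs inside some git checkout (not part of this change):

    import subprocess, tarfile

    # stream `git archive HEAD` and walk its members without seeking
    p = subprocess.Popen(['git', 'archive', 'HEAD'], stdout=subprocess.PIPE)
    tar = tarfile.open(fileobj=p.stdout, mode='r|')
    for info in tar:
        if not info.isfile():
            continue
        data = tar.extractfile(info).read()
        print('%s: %d bytes' % (info.name, len(data)))
    p.wait()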
1414 @annotatesubrepoerror
1419 @annotatesubrepoerror
1415 def status(self, rev2, **opts):
1420 def status(self, rev2, **opts):
1416 rev1 = self._state[1]
1421 rev1 = self._state[1]
1417 if self._gitmissing() or not rev1:
1422 if self._gitmissing() or not rev1:
1418 # if the repo is missing, return no results
1423 # if the repo is missing, return no results
1419 return [], [], [], [], [], [], []
1424 return [], [], [], [], [], [], []
1420 modified, added, removed = [], [], []
1425 modified, added, removed = [], [], []
1421 self._gitupdatestat()
1426 self._gitupdatestat()
1422 if rev2:
1427 if rev2:
1423 command = ['diff-tree', rev1, rev2]
1428 command = ['diff-tree', rev1, rev2]
1424 else:
1429 else:
1425 command = ['diff-index', rev1]
1430 command = ['diff-index', rev1]
1426 out = self._gitcommand(command)
1431 out = self._gitcommand(command)
1427 for line in out.split('\n'):
1432 for line in out.split('\n'):
1428 tab = line.find('\t')
1433 tab = line.find('\t')
1429 if tab == -1:
1434 if tab == -1:
1430 continue
1435 continue
1431 status, f = line[tab - 1], line[tab + 1:]
1436 status, f = line[tab - 1], line[tab + 1:]
1432 if status == 'M':
1437 if status == 'M':
1433 modified.append(f)
1438 modified.append(f)
1434 elif status == 'A':
1439 elif status == 'A':
1435 added.append(f)
1440 added.append(f)
1436 elif status == 'D':
1441 elif status == 'D':
1437 removed.append(f)
1442 removed.append(f)
1438
1443
1439 deleted = unknown = ignored = clean = []
1444 deleted = unknown = ignored = clean = []
1440 return modified, added, removed, deleted, unknown, ignored, clean
1445 return modified, added, removed, deleted, unknown, ignored, clean
1441
1446
1442 types = {
1447 types = {
1443 'hg': hgsubrepo,
1448 'hg': hgsubrepo,
1444 'svn': svnsubrepo,
1449 'svn': svnsubrepo,
1445 'git': gitsubrepo,
1450 'git': gitsubrepo,
1446 }
1451 }
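
A note on the gitsubrepo.archive() hunk above: it streams the output of git's native `archive` command through a tarfile reader and copies every regular file or symlink into the destination archiver, now counting the entries so the caller can tell whether anything matched. A minimal standalone sketch of that streaming pattern, assuming a hypothetical archiver object with an addfile(name, mode, islink, data) method and a plain subprocess call in place of _gitcommand:

    import os
    import subprocess
    import tarfile

    def archive_git_checkout(gitdir, revision, archiver, prefix, match=None):
        # One subprocess call: let git serialize the tree as a tar stream
        # instead of walking trees and blobs object by object.
        proc = subprocess.Popen(['git', '--git-dir', gitdir, 'archive', revision],
                                stdout=subprocess.PIPE)
        tar = tarfile.open(fileobj=proc.stdout, mode='r|')
        total = 0
        for info in tar:
            if info.isdir():
                continue                     # directories are implied by member paths
            if match and not match(info.name):
                continue                     # honour an optional file matcher
            if info.issym():
                data = info.linkname         # store the link target, not file contents
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(os.path.join(prefix, info.name),
                             info.mode, info.issym(), data)
            total += 1                       # the new return value: files written
        proc.wait()
        return total
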
@@ -1,320 +1,330 b''
1 $ "$TESTDIR/hghave" serve || exit 80
1 $ "$TESTDIR/hghave" serve || exit 80
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo foo>foo
5 $ echo foo>foo
6 $ hg commit -Am 1 -d '1 0'
6 $ hg commit -Am 1 -d '1 0'
7 adding foo
7 adding foo
8 $ echo bar>bar
8 $ echo bar>bar
9 $ hg commit -Am 2 -d '2 0'
9 $ hg commit -Am 2 -d '2 0'
10 adding bar
10 adding bar
11 $ mkdir baz
11 $ mkdir baz
12 $ echo bletch>baz/bletch
12 $ echo bletch>baz/bletch
13 $ hg commit -Am 3 -d '1000000000 0'
13 $ hg commit -Am 3 -d '1000000000 0'
14 adding baz/bletch
14 adding baz/bletch
15 $ echo "[web]" >> .hg/hgrc
15 $ echo "[web]" >> .hg/hgrc
16 $ echo "name = test-archive" >> .hg/hgrc
16 $ echo "name = test-archive" >> .hg/hgrc
17 $ cp .hg/hgrc .hg/hgrc-base
17 $ cp .hg/hgrc .hg/hgrc-base
18 > test_archtype() {
18 > test_archtype() {
19 > echo "allow_archive = $1" >> .hg/hgrc
19 > echo "allow_archive = $1" >> .hg/hgrc
20 > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
20 > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
21 > cat hg.pid >> $DAEMON_PIDS
21 > cat hg.pid >> $DAEMON_PIDS
22 > echo % $1 allowed should give 200
22 > echo % $1 allowed should give 200
23 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$2" | head -n 1
23 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$2" | head -n 1
24 > echo % $3 and $4 disallowed should both give 403
24 > echo % $3 and $4 disallowed should both give 403
25 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$3" | head -n 1
25 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$3" | head -n 1
26 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$4" | head -n 1
26 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$4" | head -n 1
27 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
27 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
28 > cat errors.log
28 > cat errors.log
29 > cp .hg/hgrc-base .hg/hgrc
29 > cp .hg/hgrc-base .hg/hgrc
30 > }
30 > }
31
31
32 check http return codes
32 check http return codes
33
33
34 $ test_archtype gz tar.gz tar.bz2 zip
34 $ test_archtype gz tar.gz tar.bz2 zip
35 % gz allowed should give 200
35 % gz allowed should give 200
36 200 Script output follows
36 200 Script output follows
37 % tar.bz2 and zip disallowed should both give 403
37 % tar.bz2 and zip disallowed should both give 403
38 403 Archive type not allowed: bz2
38 403 Archive type not allowed: bz2
39 403 Archive type not allowed: zip
39 403 Archive type not allowed: zip
40 $ test_archtype bz2 tar.bz2 zip tar.gz
40 $ test_archtype bz2 tar.bz2 zip tar.gz
41 % bz2 allowed should give 200
41 % bz2 allowed should give 200
42 200 Script output follows
42 200 Script output follows
43 % zip and tar.gz disallowed should both give 403
43 % zip and tar.gz disallowed should both give 403
44 403 Archive type not allowed: zip
44 403 Archive type not allowed: zip
45 403 Archive type not allowed: gz
45 403 Archive type not allowed: gz
46 $ test_archtype zip zip tar.gz tar.bz2
46 $ test_archtype zip zip tar.gz tar.bz2
47 % zip allowed should give 200
47 % zip allowed should give 200
48 200 Script output follows
48 200 Script output follows
49 % tar.gz and tar.bz2 disallowed should both give 403
49 % tar.gz and tar.bz2 disallowed should both give 403
50 403 Archive type not allowed: gz
50 403 Archive type not allowed: gz
51 403 Archive type not allowed: bz2
51 403 Archive type not allowed: bz2
52
52
53 $ echo "allow_archive = gz bz2 zip" >> .hg/hgrc
53 $ echo "allow_archive = gz bz2 zip" >> .hg/hgrc
54 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
54 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
55 $ cat hg.pid >> $DAEMON_PIDS
55 $ cat hg.pid >> $DAEMON_PIDS
56
56
57 invalid arch type should give 404
57 invalid arch type should give 404
58
58
59 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.invalid" | head -n 1
59 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.invalid" | head -n 1
60 404 Unsupported archive type: None
60 404 Unsupported archive type: None
61
61
62 $ TIP=`hg id -v | cut -f1 -d' '`
62 $ TIP=`hg id -v | cut -f1 -d' '`
63 $ QTIP=`hg id -q`
63 $ QTIP=`hg id -q`
64 $ cat > getarchive.py <<EOF
64 $ cat > getarchive.py <<EOF
65 > import os, sys, urllib2
65 > import os, sys, urllib2
66 > try:
66 > try:
67 > # Set stdout to binary mode for win32 platforms
67 > # Set stdout to binary mode for win32 platforms
68 > import msvcrt
68 > import msvcrt
69 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
69 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
70 > except ImportError:
70 > except ImportError:
71 > pass
71 > pass
72 > if len(sys.argv) <= 3:
72 > if len(sys.argv) <= 3:
73 > node, archive = sys.argv[1:]
73 > node, archive = sys.argv[1:]
74 > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive)
74 > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive)
75 > else:
75 > else:
76 > node, archive, file = sys.argv[1:]
76 > node, archive, file = sys.argv[1:]
77 > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
77 > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
78 > try:
78 > try:
79 > f = urllib2.urlopen('http://127.0.0.1:%s/?%s'
79 > f = urllib2.urlopen('http://127.0.0.1:%s/?%s'
80 > % (os.environ['HGPORT'], requeststr))
80 > % (os.environ['HGPORT'], requeststr))
81 > sys.stdout.write(f.read())
81 > sys.stdout.write(f.read())
82 > except urllib2.HTTPError, e:
82 > except urllib2.HTTPError, e:
83 > sys.stderr.write(str(e) + '\n')
83 > sys.stderr.write(str(e) + '\n')
84 > EOF
84 > EOF
85 $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
85 $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
86 test-archive-2c0277f05ed4/.hg_archival.txt
86 test-archive-2c0277f05ed4/.hg_archival.txt
87 test-archive-2c0277f05ed4/bar
87 test-archive-2c0277f05ed4/bar
88 test-archive-2c0277f05ed4/baz/bletch
88 test-archive-2c0277f05ed4/baz/bletch
89 test-archive-2c0277f05ed4/foo
89 test-archive-2c0277f05ed4/foo
90 $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
90 $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
91 test-archive-2c0277f05ed4/.hg_archival.txt
91 test-archive-2c0277f05ed4/.hg_archival.txt
92 test-archive-2c0277f05ed4/bar
92 test-archive-2c0277f05ed4/bar
93 test-archive-2c0277f05ed4/baz/bletch
93 test-archive-2c0277f05ed4/baz/bletch
94 test-archive-2c0277f05ed4/foo
94 test-archive-2c0277f05ed4/foo
95 $ python getarchive.py "$TIP" zip > archive.zip
95 $ python getarchive.py "$TIP" zip > archive.zip
96 $ unzip -t archive.zip
96 $ unzip -t archive.zip
97 Archive: archive.zip
97 Archive: archive.zip
98 testing: test-archive-2c0277f05ed4/.hg_archival.txt OK
98 testing: test-archive-2c0277f05ed4/.hg_archival.txt OK
99 testing: test-archive-2c0277f05ed4/bar OK
99 testing: test-archive-2c0277f05ed4/bar OK
100 testing: test-archive-2c0277f05ed4/baz/bletch OK
100 testing: test-archive-2c0277f05ed4/baz/bletch OK
101 testing: test-archive-2c0277f05ed4/foo OK
101 testing: test-archive-2c0277f05ed4/foo OK
102 No errors detected in compressed data of archive.zip.
102 No errors detected in compressed data of archive.zip.
103
103
104 test that we can download single directories and files
104 test that we can download single directories and files
105
105
106 $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
106 $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
107 test-archive-2c0277f05ed4/baz/bletch
107 test-archive-2c0277f05ed4/baz/bletch
108 $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
108 $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
109 test-archive-2c0277f05ed4/foo
109 test-archive-2c0277f05ed4/foo
110
110
111 test that we reject unsafe patterns
111 test that we reject unsafe patterns
112
112
113 $ python getarchive.py "$TIP" gz relre:baz
113 $ python getarchive.py "$TIP" gz relre:baz
114 HTTP Error 403: Archive pattern not allowed: relre:baz
114 HTTP Error 403: Archive pattern not allowed: relre:baz
115
115
116 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
116 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
117
117
118 $ hg archive -t tar test.tar
118 $ hg archive -t tar test.tar
119 $ tar tf test.tar
119 $ tar tf test.tar
120 test/.hg_archival.txt
120 test/.hg_archival.txt
121 test/bar
121 test/bar
122 test/baz/bletch
122 test/baz/bletch
123 test/foo
123 test/foo
124
124
125 $ hg archive --debug -t tbz2 -X baz test.tar.bz2
125 $ hg archive --debug -t tbz2 -X baz test.tar.bz2
126 archiving: 0/2 files (0.00%)
126 archiving: 0/2 files (0.00%)
127 archiving: bar 1/2 files (50.00%)
127 archiving: bar 1/2 files (50.00%)
128 archiving: foo 2/2 files (100.00%)
128 archiving: foo 2/2 files (100.00%)
129 $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null
129 $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null
130 test/.hg_archival.txt
130 test/.hg_archival.txt
131 test/bar
131 test/bar
132 test/foo
132 test/foo
133
133
134 $ hg archive -t tgz -p %b-%h test-%h.tar.gz
134 $ hg archive -t tgz -p %b-%h test-%h.tar.gz
135 $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null
135 $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null
136 test-2c0277f05ed4/.hg_archival.txt
136 test-2c0277f05ed4/.hg_archival.txt
137 test-2c0277f05ed4/bar
137 test-2c0277f05ed4/bar
138 test-2c0277f05ed4/baz/bletch
138 test-2c0277f05ed4/baz/bletch
139 test-2c0277f05ed4/foo
139 test-2c0277f05ed4/foo
140
140
141 $ hg archive autodetected_test.tar
141 $ hg archive autodetected_test.tar
142 $ tar tf autodetected_test.tar
142 $ tar tf autodetected_test.tar
143 autodetected_test/.hg_archival.txt
143 autodetected_test/.hg_archival.txt
144 autodetected_test/bar
144 autodetected_test/bar
145 autodetected_test/baz/bletch
145 autodetected_test/baz/bletch
146 autodetected_test/foo
146 autodetected_test/foo
147
147
148 The '-t' should override autodetection
148 The '-t' should override autodetection
149
149
150 $ hg archive -t tar autodetect_override_test.zip
150 $ hg archive -t tar autodetect_override_test.zip
151 $ tar tf autodetect_override_test.zip
151 $ tar tf autodetect_override_test.zip
152 autodetect_override_test.zip/.hg_archival.txt
152 autodetect_override_test.zip/.hg_archival.txt
153 autodetect_override_test.zip/bar
153 autodetect_override_test.zip/bar
154 autodetect_override_test.zip/baz/bletch
154 autodetect_override_test.zip/baz/bletch
155 autodetect_override_test.zip/foo
155 autodetect_override_test.zip/foo
156
156
157 $ for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do
157 $ for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do
158 > hg archive auto_test.$ext
158 > hg archive auto_test.$ext
159 > if [ -d auto_test.$ext ]; then
159 > if [ -d auto_test.$ext ]; then
160 > echo "extension $ext was not autodetected."
160 > echo "extension $ext was not autodetected."
161 > fi
161 > fi
162 > done
162 > done
163
163
164 $ cat > md5comp.py <<EOF
164 $ cat > md5comp.py <<EOF
165 > try:
165 > try:
166 > from hashlib import md5
166 > from hashlib import md5
167 > except ImportError:
167 > except ImportError:
168 > from md5 import md5
168 > from md5 import md5
169 > import sys
169 > import sys
170 > f1, f2 = sys.argv[1:3]
170 > f1, f2 = sys.argv[1:3]
171 > h1 = md5(file(f1, 'rb').read()).hexdigest()
171 > h1 = md5(file(f1, 'rb').read()).hexdigest()
172 > h2 = md5(file(f2, 'rb').read()).hexdigest()
172 > h2 = md5(file(f2, 'rb').read()).hexdigest()
173 > print h1 == h2 or "md5 differ: " + repr((h1, h2))
173 > print h1 == h2 or "md5 differ: " + repr((h1, h2))
174 > EOF
174 > EOF
175
175
176 archive name is stored in the archive, so create similar archives and
176 archive name is stored in the archive, so create similar archives and
177 rename them afterwards.
177 rename them afterwards.
178
178
179 $ hg archive -t tgz tip.tar.gz
179 $ hg archive -t tgz tip.tar.gz
180 $ mv tip.tar.gz tip1.tar.gz
180 $ mv tip.tar.gz tip1.tar.gz
181 $ sleep 1
181 $ sleep 1
182 $ hg archive -t tgz tip.tar.gz
182 $ hg archive -t tgz tip.tar.gz
183 $ mv tip.tar.gz tip2.tar.gz
183 $ mv tip.tar.gz tip2.tar.gz
184 $ python md5comp.py tip1.tar.gz tip2.tar.gz
184 $ python md5comp.py tip1.tar.gz tip2.tar.gz
185 True
185 True
186
186
187 $ hg archive -t zip -p /illegal test.zip
187 $ hg archive -t zip -p /illegal test.zip
188 abort: archive prefix contains illegal components
188 abort: archive prefix contains illegal components
189 [255]
189 [255]
190 $ hg archive -t zip -p very/../bad test.zip
190 $ hg archive -t zip -p very/../bad test.zip
191
191
192 $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip
192 $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip
193 $ unzip -t test.zip
193 $ unzip -t test.zip
194 Archive: test.zip
194 Archive: test.zip
195 testing: test/bar OK
195 testing: test/bar OK
196 testing: test/baz/bletch OK
196 testing: test/baz/bletch OK
197 testing: test/foo OK
197 testing: test/foo OK
198 No errors detected in compressed data of test.zip.
198 No errors detected in compressed data of test.zip.
199
199
200 $ hg archive -t tar - | tar tf - 2>/dev/null
200 $ hg archive -t tar - | tar tf - 2>/dev/null
201 test-2c0277f05ed4/.hg_archival.txt
201 test-2c0277f05ed4/.hg_archival.txt
202 test-2c0277f05ed4/bar
202 test-2c0277f05ed4/bar
203 test-2c0277f05ed4/baz/bletch
203 test-2c0277f05ed4/baz/bletch
204 test-2c0277f05ed4/foo
204 test-2c0277f05ed4/foo
205
205
206 $ hg archive -r 0 -t tar rev-%r.tar
206 $ hg archive -r 0 -t tar rev-%r.tar
207 $ if [ -f rev-0.tar ]; then
207 $ if [ -f rev-0.tar ]; then
208 $ fi
208 $ fi
209
209
210 test .hg_archival.txt
210 test .hg_archival.txt
211
211
212 $ hg archive ../test-tags
212 $ hg archive ../test-tags
213 $ cat ../test-tags/.hg_archival.txt
213 $ cat ../test-tags/.hg_archival.txt
214 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
214 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
215 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
215 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
216 branch: default
216 branch: default
217 latesttag: null
217 latesttag: null
218 latesttagdistance: 3
218 latesttagdistance: 3
219 $ hg tag -r 2 mytag
219 $ hg tag -r 2 mytag
220 $ hg tag -r 2 anothertag
220 $ hg tag -r 2 anothertag
221 $ hg archive -r 2 ../test-lasttag
221 $ hg archive -r 2 ../test-lasttag
222 $ cat ../test-lasttag/.hg_archival.txt
222 $ cat ../test-lasttag/.hg_archival.txt
223 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
223 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
224 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
224 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
225 branch: default
225 branch: default
226 tag: anothertag
226 tag: anothertag
227 tag: mytag
227 tag: mytag
228
228
229 $ hg archive -t bogus test.bogus
229 $ hg archive -t bogus test.bogus
230 abort: unknown archive type 'bogus'
230 abort: unknown archive type 'bogus'
231 [255]
231 [255]
232
232
233 enable progress extension:
233 enable progress extension:
234
234
235 $ cp $HGRCPATH $HGRCPATH.no-progress
235 $ cp $HGRCPATH $HGRCPATH.no-progress
236 $ cat >> $HGRCPATH <<EOF
236 $ cat >> $HGRCPATH <<EOF
237 > [extensions]
237 > [extensions]
238 > progress =
238 > progress =
239 > [progress]
239 > [progress]
240 > assume-tty = 1
240 > assume-tty = 1
241 > format = topic bar number
241 > format = topic bar number
242 > delay = 0
242 > delay = 0
243 > refresh = 0
243 > refresh = 0
244 > width = 60
244 > width = 60
245 > EOF
245 > EOF
246
246
247 $ hg archive ../with-progress
247 $ hg archive ../with-progress
248 \r (no-eol) (esc)
248 \r (no-eol) (esc)
249 archiving [ ] 0/4\r (no-eol) (esc)
249 archiving [ ] 0/4\r (no-eol) (esc)
250 archiving [ ] 0/4\r (no-eol) (esc)
250 archiving [ ] 0/4\r (no-eol) (esc)
251 archiving [=========> ] 1/4\r (no-eol) (esc)
251 archiving [=========> ] 1/4\r (no-eol) (esc)
252 archiving [=========> ] 1/4\r (no-eol) (esc)
252 archiving [=========> ] 1/4\r (no-eol) (esc)
253 archiving [====================> ] 2/4\r (no-eol) (esc)
253 archiving [====================> ] 2/4\r (no-eol) (esc)
254 archiving [====================> ] 2/4\r (no-eol) (esc)
254 archiving [====================> ] 2/4\r (no-eol) (esc)
255 archiving [===============================> ] 3/4\r (no-eol) (esc)
255 archiving [===============================> ] 3/4\r (no-eol) (esc)
256 archiving [===============================> ] 3/4\r (no-eol) (esc)
256 archiving [===============================> ] 3/4\r (no-eol) (esc)
257 archiving [==========================================>] 4/4\r (no-eol) (esc)
257 archiving [==========================================>] 4/4\r (no-eol) (esc)
258 archiving [==========================================>] 4/4\r (no-eol) (esc)
258 archiving [==========================================>] 4/4\r (no-eol) (esc)
259 \r (no-eol) (esc)
259 \r (no-eol) (esc)
260
260
261 cleanup after progress extension test:
261 cleanup after progress extension test:
262
262
263 $ cp $HGRCPATH.no-progress $HGRCPATH
263 $ cp $HGRCPATH.no-progress $HGRCPATH
264
264
265 server errors
265 server errors
266
266
267 $ cat errors.log
267 $ cat errors.log
268
268
269 empty repo
269 empty repo
270
270
271 $ hg init ../empty
271 $ hg init ../empty
272 $ cd ../empty
272 $ cd ../empty
273 $ hg archive ../test-empty
273 $ hg archive ../test-empty
274 abort: no working directory: please specify a revision
274 abort: no working directory: please specify a revision
275 [255]
275 [255]
276
276
277 old file -- date clamped to 1980
277 old file -- date clamped to 1980
278
278
279 $ touch -t 197501010000 old
279 $ touch -t 197501010000 old
280 $ hg add old
280 $ hg add old
281 $ hg commit -m old
281 $ hg commit -m old
282 $ hg archive ../old.zip
282 $ hg archive ../old.zip
283 $ unzip -l ../old.zip
283 $ unzip -l ../old.zip
284 Archive: ../old.zip
284 Archive: ../old.zip
285 \s*Length.* (re)
285 \s*Length.* (re)
286 *-----* (glob)
286 *-----* (glob)
287 *147*80*00:00*old/.hg_archival.txt (glob)
287 *147*80*00:00*old/.hg_archival.txt (glob)
288 *0*80*00:00*old/old (glob)
288 *0*80*00:00*old/old (glob)
289 *-----* (glob)
289 *-----* (glob)
290 \s*147\s+2 files (re)
290 \s*147\s+2 files (re)
291
291
292 show an error when a provided pattern matches no files
293
294 $ hg archive -I file_that_does_not_exist.foo ../empty.zip
295 abort: no files match the archive pattern
296 [255]
297
298 $ hg archive -X * ../empty.zip
299 abort: no files match the archive pattern
300 [255]
301
292 $ cd ..
302 $ cd ..
293
303
294 issue3600: check whether "hg archive" can create archive files which
304 issue3600: check whether "hg archive" can create archive files which
295 are extracted with expected timestamp, even though TZ is not
305 are extracted with expected timestamp, even though TZ is not
296 configured as GMT.
306 configured as GMT.
297
307
298 $ mkdir issue3600
308 $ mkdir issue3600
299 $ cd issue3600
309 $ cd issue3600
300
310
301 $ hg init repo
311 $ hg init repo
302 $ echo a > repo/a
312 $ echo a > repo/a
303 $ hg -R repo add repo/a
313 $ hg -R repo add repo/a
304 $ hg -R repo commit -m '#0' -d '456789012 21600'
314 $ hg -R repo commit -m '#0' -d '456789012 21600'
305 $ cat > show_mtime.py <<EOF
315 $ cat > show_mtime.py <<EOF
306 > import sys, os
316 > import sys, os
307 > print int(os.stat(sys.argv[1]).st_mtime)
317 > print int(os.stat(sys.argv[1]).st_mtime)
308 > EOF
318 > EOF
309
319
310 $ hg -R repo archive --prefix tar-extracted archive.tar
320 $ hg -R repo archive --prefix tar-extracted archive.tar
311 $ (TZ=UTC-3; export TZ; tar xf archive.tar)
321 $ (TZ=UTC-3; export TZ; tar xf archive.tar)
312 $ python show_mtime.py tar-extracted/a
322 $ python show_mtime.py tar-extracted/a
313 456789012
323 456789012
314
324
315 $ hg -R repo archive --prefix zip-extracted archive.zip
325 $ hg -R repo archive --prefix zip-extracted archive.zip
316 $ (TZ=UTC-3; export TZ; unzip -q archive.zip)
326 $ (TZ=UTC-3; export TZ; unzip -q archive.zip)
317 $ python show_mtime.py zip-extracted/a
327 $ python show_mtime.py zip-extracted/a
318 456789012
328 456789012
319
329
320 $ cd ..
330 $ cd ..
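
The zip-related expectations in the test above ("date clamped to 1980" and the issue3600 extraction checks) come down to how the two archive formats store timestamps: tar keeps plain epoch seconds, while zip keeps a naive year/month/day/hour/minute/second tuple that cannot represent dates before 1980 and carries no timezone. A small illustration using only the standard library (this is not Mercurial's archiver code, just the underlying format behaviour; the 1980 cutoff constant is an assumption for the sketch):

    import io
    import tarfile
    import time
    import zipfile

    MTIME = 456789012          # epoch seconds, the commit date used by the test
    ZIP_EPOCH = 315532800      # assumed cutoff: 1980-01-01 UTC, the oldest zip date

    # tar: mtime is stored as epoch seconds, so the extracted timestamp does
    # not depend on the extractor's TZ setting.
    tbuf = io.BytesIO()
    tar = tarfile.open(fileobj=tbuf, mode='w')
    tinfo = tarfile.TarInfo('a')
    tinfo.mtime = MTIME
    tar.addfile(tinfo, io.BytesIO(b''))
    tar.close()

    # zip: date_time is a naive (Y, M, D, h, m, s) tuple, so the writer must
    # clamp anything older than 1980 and pick a timezone interpretation;
    # extracting under an explicit TZ, as the test does, shows whether the
    # round trip preserves the original epoch time.
    zbuf = io.BytesIO()
    zf = zipfile.ZipFile(zbuf, 'w')
    zinfo = zipfile.ZipInfo('a', date_time=time.gmtime(max(MTIME, ZIP_EPOCH))[:6])
    zf.writestr(zinfo, b'')
    zf.close()
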
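The other newly added block in this test ("show an error when a provided pattern matches no files") exercises the behaviour the changeset introduces: when -I/-X filtering leaves nothing to archive, the command aborts instead of silently producing an empty archive. A rough sketch of the counting-and-abort pattern, using a simplified signature rather than the real archival.archive() one:

    from mercurial import error
    from mercurial.i18n import _

    def archive_matching(ctx, matchfn, write):
        # ctx stands in for a changectx-like object whose manifest() lists
        # file names and whose ctx[name].data() returns file contents;
        # write(name, data) stores one file in the destination archive.
        total = 0
        for name in ctx.manifest():
            if not matchfn(name):
                continue
            write(name, ctx[name].data())
            total += 1
        if total == 0:
            # the abort message the new test expects
            raise error.Abort(_('no files match the archive pattern'))
        return total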