archive: raise error.Abort if the file pattern matches no files...
Angel Ezquerra
r18967:88d1b59f default
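For context, this change makes hg archive abort instead of producing an archive with no matched files when the include/exclude patterns match nothing. A minimal illustration (the pattern and destination below are made up for the example; the abort message is the one added in this changeset):

    $ hg archive -I 're:does-not-match-anything' ../out.tar.gz
    abort: no files match the archive pattern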
@@ -1,307 +1,313 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex
10 10 import match as matchmod
11 11 import cmdutil
12 12 import scmutil, util, encoding
13 13 import cStringIO, os, tarfile, time, zipfile
14 14 import zlib, gzip
15 15 import struct
16 import error
16 17
17 18 # from unzip source code:
18 19 _UNX_IFREG = 0x8000
19 20 _UNX_IFLNK = 0xa000
20 21
21 22 def tidyprefix(dest, kind, prefix):
22 23 '''choose prefix to use for names in archive. make sure prefix is
23 24 safe for consumers.'''
24 25
25 26 if prefix:
26 27 prefix = util.normpath(prefix)
27 28 else:
28 29 if not isinstance(dest, str):
29 30 raise ValueError('dest must be string if no prefix')
30 31 prefix = os.path.basename(dest)
31 32 lower = prefix.lower()
32 33 for sfx in exts.get(kind, []):
33 34 if lower.endswith(sfx):
34 35 prefix = prefix[:-len(sfx)]
35 36 break
36 37 lpfx = os.path.normpath(util.localpath(prefix))
37 38 prefix = util.pconvert(lpfx)
38 39 if not prefix.endswith('/'):
39 40 prefix += '/'
40 41 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
41 42 raise util.Abort(_('archive prefix contains illegal components'))
42 43 return prefix
43 44
44 45 exts = {
45 46 'tar': ['.tar'],
46 47 'tbz2': ['.tbz2', '.tar.bz2'],
47 48 'tgz': ['.tgz', '.tar.gz'],
48 49 'zip': ['.zip'],
49 50 }
50 51
51 52 def guesskind(dest):
52 53 for kind, extensions in exts.iteritems():
53 54 if util.any(dest.endswith(ext) for ext in extensions):
54 55 return kind
55 56 return None
56 57
57 58
58 59 class tarit(object):
59 60 '''write archive to tar file or stream. can write uncompressed,
60 61 or compress with gzip or bzip2.'''
61 62
62 63 class GzipFileWithTime(gzip.GzipFile):
63 64
64 65 def __init__(self, *args, **kw):
65 66 timestamp = None
66 67 if 'timestamp' in kw:
67 68 timestamp = kw.pop('timestamp')
68 69 if timestamp is None:
69 70 self.timestamp = time.time()
70 71 else:
71 72 self.timestamp = timestamp
72 73 gzip.GzipFile.__init__(self, *args, **kw)
73 74
74 75 def _write_gzip_header(self):
75 76 self.fileobj.write('\037\213') # magic header
76 77 self.fileobj.write('\010') # compression method
77 78 # Python 2.6 introduced self.name and deprecated self.filename
78 79 try:
79 80 fname = self.name
80 81 except AttributeError:
81 82 fname = self.filename
82 83 if fname and fname.endswith('.gz'):
83 84 fname = fname[:-3]
84 85 flags = 0
85 86 if fname:
86 87 flags = gzip.FNAME
87 88 self.fileobj.write(chr(flags))
88 89 gzip.write32u(self.fileobj, long(self.timestamp))
89 90 self.fileobj.write('\002')
90 91 self.fileobj.write('\377')
91 92 if fname:
92 93 self.fileobj.write(fname + '\000')
93 94
94 95 def __init__(self, dest, mtime, kind=''):
95 96 self.mtime = mtime
96 97 self.fileobj = None
97 98
98 99 def taropen(name, mode, fileobj=None):
99 100 if kind == 'gz':
100 101 mode = mode[0]
101 102 if not fileobj:
102 103 fileobj = open(name, mode + 'b')
103 104 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
104 105 zlib.Z_BEST_COMPRESSION,
105 106 fileobj, timestamp=mtime)
106 107 self.fileobj = gzfileobj
107 108 return tarfile.TarFile.taropen(name, mode, gzfileobj)
108 109 else:
109 110 return tarfile.open(name, mode + kind, fileobj)
110 111
111 112 if isinstance(dest, str):
112 113 self.z = taropen(dest, mode='w:')
113 114 else:
114 115 # Python 2.5-2.5.1 have a regression that requires a name arg
115 116 self.z = taropen(name='', mode='w|', fileobj=dest)
116 117
117 118 def addfile(self, name, mode, islink, data):
118 119 i = tarfile.TarInfo(name)
119 120 i.mtime = self.mtime
120 121 i.size = len(data)
121 122 if islink:
122 123 i.type = tarfile.SYMTYPE
123 124 i.mode = 0777
124 125 i.linkname = data
125 126 data = None
126 127 i.size = 0
127 128 else:
128 129 i.mode = mode
129 130 data = cStringIO.StringIO(data)
130 131 self.z.addfile(i, data)
131 132
132 133 def done(self):
133 134 self.z.close()
134 135 if self.fileobj:
135 136 self.fileobj.close()
136 137
137 138 class tellable(object):
138 139 '''provide tell method for zipfile.ZipFile when writing to http
139 140 response file object.'''
140 141
141 142 def __init__(self, fp):
142 143 self.fp = fp
143 144 self.offset = 0
144 145
145 146 def __getattr__(self, key):
146 147 return getattr(self.fp, key)
147 148
148 149 def write(self, s):
149 150 self.fp.write(s)
150 151 self.offset += len(s)
151 152
152 153 def tell(self):
153 154 return self.offset
154 155
155 156 class zipit(object):
156 157 '''write archive to zip file or stream. can write uncompressed,
157 158 or compressed with deflate.'''
158 159
159 160 def __init__(self, dest, mtime, compress=True):
160 161 if not isinstance(dest, str):
161 162 try:
162 163 dest.tell()
163 164 except (AttributeError, IOError):
164 165 dest = tellable(dest)
165 166 self.z = zipfile.ZipFile(dest, 'w',
166 167 compress and zipfile.ZIP_DEFLATED or
167 168 zipfile.ZIP_STORED)
168 169
169 170 # Python's zipfile module emits deprecation warnings if we try
170 171 # to store files with a date before 1980.
171 172 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
172 173 if mtime < epoch:
173 174 mtime = epoch
174 175
175 176 self.mtime = mtime
176 177 self.date_time = time.gmtime(mtime)[:6]
177 178
178 179 def addfile(self, name, mode, islink, data):
179 180 i = zipfile.ZipInfo(name, self.date_time)
180 181 i.compress_type = self.z.compression
181 182 # unzip will not honor unix file modes unless file creator is
182 183 # set to unix (id 3).
183 184 i.create_system = 3
184 185 ftype = _UNX_IFREG
185 186 if islink:
186 187 mode = 0777
187 188 ftype = _UNX_IFLNK
188 189 i.external_attr = (mode | ftype) << 16L
189 190 # add "extended-timestamp" extra block, because zip archives
190 191 # without this will be extracted with unexpected timestamp,
191 192 # if TZ is not configured as GMT
192 193 i.extra += struct.pack('<hhBl',
193 194 0x5455, # block type: "extended-timestamp"
194 195 1 + 4, # size of this block
195 196 1, # "modification time is present"
196 197 int(self.mtime)) # last modification (UTC)
197 198 self.z.writestr(i, data)
198 199
199 200 def done(self):
200 201 self.z.close()
201 202
202 203 class fileit(object):
203 204 '''write archive as files in directory.'''
204 205
205 206 def __init__(self, name, mtime):
206 207 self.basedir = name
207 208 self.opener = scmutil.opener(self.basedir)
208 209
209 210 def addfile(self, name, mode, islink, data):
210 211 if islink:
211 212 self.opener.symlink(data, name)
212 213 return
213 214 f = self.opener(name, "w", atomictemp=True)
214 215 f.write(data)
215 216 f.close()
216 217 destfile = os.path.join(self.basedir, name)
217 218 os.chmod(destfile, mode)
218 219
219 220 def done(self):
220 221 pass
221 222
222 223 archivers = {
223 224 'files': fileit,
224 225 'tar': tarit,
225 226 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
226 227 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
227 228 'uzip': lambda name, mtime: zipit(name, mtime, False),
228 229 'zip': zipit,
229 230 }
230 231
231 232 def archive(repo, dest, node, kind, decode=True, matchfn=None,
232 233 prefix=None, mtime=None, subrepos=False):
233 234 '''create archive of repo as it was at node.
234 235
235 236 dest can be name of directory, name of archive file, or file
236 237 object to write archive to.
237 238
238 239 kind is type of archive to create.
239 240
240 241 decode tells whether to put files through decode filters from
241 242 hgrc.
242 243
243 244 matchfn is function to filter names of files to write to archive.
244 245
245 246 prefix is name of path to put before every archive member.'''
246 247
247 248 if kind == 'files':
248 249 if prefix:
249 250 raise util.Abort(_('cannot give prefix when archiving to files'))
250 251 else:
251 252 prefix = tidyprefix(dest, kind, prefix)
252 253
253 254 def write(name, mode, islink, getdata):
254 255 data = getdata()
255 256 if decode:
256 257 data = repo.wwritedata(name, data)
257 258 archiver.addfile(prefix + name, mode, islink, data)
258 259
259 260 if kind not in archivers:
260 261 raise util.Abort(_("unknown archive type '%s'") % kind)
261 262
262 263 ctx = repo[node]
263 264 archiver = archivers[kind](dest, mtime or ctx.date()[0])
264 265
265 266 if repo.ui.configbool("ui", "archivemeta", True):
266 267 def metadata():
267 268 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
268 269 repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
269 270
270 271 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
271 272 if repo.tagtype(t) == 'global')
272 273 if not tags:
273 274 repo.ui.pushbuffer()
274 275 opts = {'template': '{latesttag}\n{latesttagdistance}',
275 276 'style': '', 'patch': None, 'git': None}
276 277 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
277 278 ltags, dist = repo.ui.popbuffer().split('\n')
278 279 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
279 280 tags += 'latesttagdistance: %s\n' % dist
280 281
281 282 return base + tags
282 283
283 284 name = '.hg_archival.txt'
284 285 if not matchfn or matchfn(name):
285 286 write(name, 0644, False, metadata)
286 287
287 288 if matchfn:
288 289 files = [f for f in ctx.manifest().keys() if matchfn(f)]
289 290 else:
290 291 files = ctx.manifest().keys()
291 files.sort()
292 292 total = len(files)
293 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
294 for i, f in enumerate(files):
295 ff = ctx.flags(f)
296 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
297 repo.ui.progress(_('archiving'), i + 1, item=f,
298 unit=_('files'), total=total)
299 repo.ui.progress(_('archiving'), None)
293 if total:
294 files.sort()
295 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
296 for i, f in enumerate(files):
297 ff = ctx.flags(f)
298 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
299 repo.ui.progress(_('archiving'), i + 1, item=f,
300 unit=_('files'), total=total)
301 repo.ui.progress(_('archiving'), None)
300 302
301 303 if subrepos:
302 304 for subpath in sorted(ctx.substate):
303 305 sub = ctx.sub(subpath)
304 306 submatch = matchmod.narrowmatcher(subpath, matchfn)
305 sub.archive(repo.ui, archiver, prefix, submatch)
307 total += sub.archive(repo.ui, archiver, prefix, submatch)
308
309 if total == 0:
310 raise error.Abort(_('no files match the archive pattern'))
306 311
307 312 archiver.done()
313 return total
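Taken together, the hunks in this file and in subrepo.py thread a file count through the call chain: archive() counts the files it writes, each subrepo archive() implementation returns its own count, and the parent adds those counts before deciding whether to abort. A simplified, self-contained sketch of that flow (illustrative names only, not the actual Mercurial API):

    # Sketch of the new control flow: count matched files, add subrepo counts,
    # abort when the combined total is zero.
    class Abort(Exception):
        pass

    def archive_subrepo(files, matchfn):
        # stands in for abstractsubrepo.archive(), which now returns its file count
        return len([f for f in files if matchfn(f)])

    def archive(files, subrepo_files, matchfn, subrepos=False):
        matched = [f for f in files if matchfn(f)]
        total = len(matched)          # files written from the main repo
        if subrepos:
            total += archive_subrepo(subrepo_files, matchfn)
        if total == 0:
            raise Abort('no files match the archive pattern')
        return total

    # e.g. archive(['a.txt'], ['sub/b.txt'], lambda f: f.endswith('.c'), subrepos=True)
    # raises Abort, because neither the repo nor the subrepo has a matching file.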
@@ -1,1446 +1,1451 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import errno, os, re, xml.dom.minidom, shutil, posixpath, sys
9 9 import stat, subprocess, tarfile
10 10 from i18n import _
11 11 import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
12 12 hg = None
13 13 propertycache = util.propertycache
14 14
15 15 nullstate = ('', '', 'empty')
16 16
17 17 def _expandedabspath(path):
18 18 '''
19 19 get a path or url and if it is a path expand it and return an absolute path
20 20 '''
21 21 expandedpath = util.urllocalpath(util.expandpath(path))
22 22 u = util.url(expandedpath)
23 23 if not u.scheme:
24 24 path = util.normpath(os.path.abspath(u.path))
25 25 return path
26 26
27 27 def _getstorehashcachename(remotepath):
28 28 '''get a unique filename for the store hash cache of a remote repository'''
29 29 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
30 30
31 31 def _calcfilehash(filename):
32 32 data = ''
33 33 if os.path.exists(filename):
34 34 fd = open(filename)
35 35 data = fd.read()
36 36 fd.close()
37 37 return util.sha1(data).hexdigest()
38 38
39 39 class SubrepoAbort(error.Abort):
40 40 """Exception class used to avoid handling a subrepo error more than once"""
41 41 def __init__(self, *args, **kw):
42 42 error.Abort.__init__(self, *args, **kw)
43 43 self.subrepo = kw.get('subrepo')
44 44 self.cause = kw.get('cause')
45 45
46 46 def annotatesubrepoerror(func):
47 47 def decoratedmethod(self, *args, **kargs):
48 48 try:
49 49 res = func(self, *args, **kargs)
50 50 except SubrepoAbort, ex:
51 51 # This exception has already been handled
52 52 raise ex
53 53 except error.Abort, ex:
54 54 subrepo = subrelpath(self)
55 55 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
56 56 # avoid handling this exception by raising a SubrepoAbort exception
57 57 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
58 58 cause=sys.exc_info())
59 59 return res
60 60 return decoratedmethod
61 61
62 62 def state(ctx, ui):
63 63 """return a state dict, mapping subrepo paths configured in .hgsub
64 64 to tuple: (source from .hgsub, revision from .hgsubstate, kind
65 65 (key in types dict))
66 66 """
67 67 p = config.config()
68 68 def read(f, sections=None, remap=None):
69 69 if f in ctx:
70 70 try:
71 71 data = ctx[f].data()
72 72 except IOError, err:
73 73 if err.errno != errno.ENOENT:
74 74 raise
75 75 # handle missing subrepo spec files as removed
76 76 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
77 77 return
78 78 p.parse(f, data, sections, remap, read)
79 79 else:
80 80 raise util.Abort(_("subrepo spec file %s not found") % f)
81 81
82 82 if '.hgsub' in ctx:
83 83 read('.hgsub')
84 84
85 85 for path, src in ui.configitems('subpaths'):
86 86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
87 87
88 88 rev = {}
89 89 if '.hgsubstate' in ctx:
90 90 try:
91 91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
92 92 l = l.lstrip()
93 93 if not l:
94 94 continue
95 95 try:
96 96 revision, path = l.split(" ", 1)
97 97 except ValueError:
98 98 raise util.Abort(_("invalid subrepository revision "
99 99 "specifier in .hgsubstate line %d")
100 100 % (i + 1))
101 101 rev[path] = revision
102 102 except IOError, err:
103 103 if err.errno != errno.ENOENT:
104 104 raise
105 105
106 106 def remap(src):
107 107 for pattern, repl in p.items('subpaths'):
108 108 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
109 109 # does a string decode.
110 110 repl = repl.encode('string-escape')
111 111 # However, we still want to allow back references to go
112 112 # through unharmed, so we turn r'\\1' into r'\1'. Again,
113 113 # extra escapes are needed because re.sub string decodes.
114 114 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
115 115 try:
116 116 src = re.sub(pattern, repl, src, 1)
117 117 except re.error, e:
118 118 raise util.Abort(_("bad subrepository pattern in %s: %s")
119 119 % (p.source('subpaths', pattern), e))
120 120 return src
121 121
122 122 state = {}
123 123 for path, src in p[''].items():
124 124 kind = 'hg'
125 125 if src.startswith('['):
126 126 if ']' not in src:
127 127 raise util.Abort(_('missing ] in subrepo source'))
128 128 kind, src = src.split(']', 1)
129 129 kind = kind[1:]
130 130 src = src.lstrip() # strip any extra whitespace after ']'
131 131
132 132 if not util.url(src).isabs():
133 133 parent = _abssource(ctx._repo, abort=False)
134 134 if parent:
135 135 parent = util.url(parent)
136 136 parent.path = posixpath.join(parent.path or '', src)
137 137 parent.path = posixpath.normpath(parent.path)
138 138 joined = str(parent)
139 139 # Remap the full joined path and use it if it changes,
140 140 # else remap the original source.
141 141 remapped = remap(joined)
142 142 if remapped == joined:
143 143 src = remap(src)
144 144 else:
145 145 src = remapped
146 146
147 147 src = remap(src)
148 148 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
149 149
150 150 return state
151 151
152 152 def writestate(repo, state):
153 153 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
154 154 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
155 155 repo.wwrite('.hgsubstate', ''.join(lines), '')
156 156
157 157 def submerge(repo, wctx, mctx, actx, overwrite):
158 158 """delegated from merge.applyupdates: merging of .hgsubstate file
159 159 in working context, merging context and ancestor context"""
160 160 if mctx == actx: # backwards?
161 161 actx = wctx.p1()
162 162 s1 = wctx.substate
163 163 s2 = mctx.substate
164 164 sa = actx.substate
165 165 sm = {}
166 166
167 167 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
168 168
169 169 def debug(s, msg, r=""):
170 170 if r:
171 171 r = "%s:%s:%s" % r
172 172 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
173 173
174 174 for s, l in sorted(s1.iteritems()):
175 175 a = sa.get(s, nullstate)
176 176 ld = l # local state with possible dirty flag for compares
177 177 if wctx.sub(s).dirty():
178 178 ld = (l[0], l[1] + "+")
179 179 if wctx == actx: # overwrite
180 180 a = ld
181 181
182 182 if s in s2:
183 183 r = s2[s]
184 184 if ld == r or r == a: # no change or local is newer
185 185 sm[s] = l
186 186 continue
187 187 elif ld == a: # other side changed
188 188 debug(s, "other changed, get", r)
189 189 wctx.sub(s).get(r, overwrite)
190 190 sm[s] = r
191 191 elif ld[0] != r[0]: # sources differ
192 192 if repo.ui.promptchoice(
193 193 _(' subrepository sources for %s differ\n'
194 194 'use (l)ocal source (%s) or (r)emote source (%s)?')
195 195 % (s, l[0], r[0]),
196 196 (_('&Local'), _('&Remote')), 0):
197 197 debug(s, "prompt changed, get", r)
198 198 wctx.sub(s).get(r, overwrite)
199 199 sm[s] = r
200 200 elif ld[1] == a[1]: # local side is unchanged
201 201 debug(s, "other side changed, get", r)
202 202 wctx.sub(s).get(r, overwrite)
203 203 sm[s] = r
204 204 else:
205 205 debug(s, "both sides changed, merge with", r)
206 206 wctx.sub(s).merge(r)
207 207 sm[s] = l
208 208 elif ld == a: # remote removed, local unchanged
209 209 debug(s, "remote removed, remove")
210 210 wctx.sub(s).remove()
211 211 elif a == nullstate: # not present in remote or ancestor
212 212 debug(s, "local added, keep")
213 213 sm[s] = l
214 214 continue
215 215 else:
216 216 if repo.ui.promptchoice(
217 217 _(' local changed subrepository %s which remote removed\n'
218 218 'use (c)hanged version or (d)elete?') % s,
219 219 (_('&Changed'), _('&Delete')), 0):
220 220 debug(s, "prompt remove")
221 221 wctx.sub(s).remove()
222 222
223 223 for s, r in sorted(s2.items()):
224 224 if s in s1:
225 225 continue
226 226 elif s not in sa:
227 227 debug(s, "remote added, get", r)
228 228 mctx.sub(s).get(r)
229 229 sm[s] = r
230 230 elif r != sa[s]:
231 231 if repo.ui.promptchoice(
232 232 _(' remote changed subrepository %s which local removed\n'
233 233 'use (c)hanged version or (d)elete?') % s,
234 234 (_('&Changed'), _('&Delete')), 0) == 0:
235 235 debug(s, "prompt recreate", r)
236 236 wctx.sub(s).get(r)
237 237 sm[s] = r
238 238
239 239 # record merged .hgsubstate
240 240 writestate(repo, sm)
241 241
242 242 def _updateprompt(ui, sub, dirty, local, remote):
243 243 if dirty:
244 244 msg = (_(' subrepository sources for %s differ\n'
245 245 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
246 246 % (subrelpath(sub), local, remote))
247 247 else:
248 248 msg = (_(' subrepository sources for %s differ (in checked out '
249 249 'version)\n'
250 250 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
251 251 % (subrelpath(sub), local, remote))
252 252 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
253 253
254 254 def reporelpath(repo):
255 255 """return path to this (sub)repo as seen from outermost repo"""
256 256 parent = repo
257 257 while util.safehasattr(parent, '_subparent'):
258 258 parent = parent._subparent
259 259 p = parent.root.rstrip(os.sep)
260 260 return repo.root[len(p) + 1:]
261 261
262 262 def subrelpath(sub):
263 263 """return path to this subrepo as seen from outermost repo"""
264 264 if util.safehasattr(sub, '_relpath'):
265 265 return sub._relpath
266 266 if not util.safehasattr(sub, '_repo'):
267 267 return sub._path
268 268 return reporelpath(sub._repo)
269 269
270 270 def _abssource(repo, push=False, abort=True):
271 271 """return pull/push path of repo - either based on parent repo .hgsub info
272 272 or on the top repo config. Abort or return None if no source found."""
273 273 if util.safehasattr(repo, '_subparent'):
274 274 source = util.url(repo._subsource)
275 275 if source.isabs():
276 276 return str(source)
277 277 source.path = posixpath.normpath(source.path)
278 278 parent = _abssource(repo._subparent, push, abort=False)
279 279 if parent:
280 280 parent = util.url(util.pconvert(parent))
281 281 parent.path = posixpath.join(parent.path or '', source.path)
282 282 parent.path = posixpath.normpath(parent.path)
283 283 return str(parent)
284 284 else: # recursion reached top repo
285 285 if util.safehasattr(repo, '_subtoppath'):
286 286 return repo._subtoppath
287 287 if push and repo.ui.config('paths', 'default-push'):
288 288 return repo.ui.config('paths', 'default-push')
289 289 if repo.ui.config('paths', 'default'):
290 290 return repo.ui.config('paths', 'default')
291 291 if repo.sharedpath != repo.path:
292 292 # chop off the .hg component to get the default path form
293 293 return os.path.dirname(repo.sharedpath)
294 294 if abort:
295 295 raise util.Abort(_("default path for subrepository not found"))
296 296
297 297 def itersubrepos(ctx1, ctx2):
298 298 """find subrepos in ctx1 or ctx2"""
299 299 # Create a (subpath, ctx) mapping where we prefer subpaths from
300 300 # ctx1. The subpaths from ctx2 are important when the .hgsub file
301 301 # has been modified (in ctx2) but not yet committed (in ctx1).
302 302 subpaths = dict.fromkeys(ctx2.substate, ctx2)
303 303 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
304 304 for subpath, ctx in sorted(subpaths.iteritems()):
305 305 yield subpath, ctx.sub(subpath)
306 306
307 307 def subrepo(ctx, path):
308 308 """return instance of the right subrepo class for subrepo in path"""
309 309 # subrepo inherently violates our import layering rules
310 310 # because it wants to make repo objects from deep inside the stack
311 311 # so we manually delay the circular imports to not break
312 312 # scripts that don't use our demand-loading
313 313 global hg
314 314 import hg as h
315 315 hg = h
316 316
317 317 scmutil.pathauditor(ctx._repo.root)(path)
318 318 state = ctx.substate[path]
319 319 if state[2] not in types:
320 320 raise util.Abort(_('unknown subrepo type %s') % state[2])
321 321 return types[state[2]](ctx, path, state[:2])
322 322
323 323 # subrepo classes need to implement the following abstract class:
324 324
325 325 class abstractsubrepo(object):
326 326
327 327 def storeclean(self, path):
328 328 """
329 329 returns true if the repository has not changed since it was last
330 330 cloned from or pushed to a given repository.
331 331 """
332 332 return False
333 333
334 334 def dirty(self, ignoreupdate=False):
335 335 """returns true if the dirstate of the subrepo is dirty or does not
336 336 match current stored state. If ignoreupdate is true, only check
337 337 whether the subrepo has uncommitted changes in its dirstate.
338 338 """
339 339 raise NotImplementedError
340 340
341 341 def basestate(self):
342 342 """current working directory base state, disregarding .hgsubstate
343 343 state and working directory modifications"""
344 344 raise NotImplementedError
345 345
346 346 def checknested(self, path):
347 347 """check if path is a subrepository within this repository"""
348 348 return False
349 349
350 350 def commit(self, text, user, date):
351 351 """commit the current changes to the subrepo with the given
352 352 log message. Use given user and date if possible. Return the
353 353 new state of the subrepo.
354 354 """
355 355 raise NotImplementedError
356 356
357 357 def remove(self):
358 358 """remove the subrepo
359 359
360 360 (should verify the dirstate is not dirty first)
361 361 """
362 362 raise NotImplementedError
363 363
364 364 def get(self, state, overwrite=False):
365 365 """run whatever commands are needed to put the subrepo into
366 366 this state
367 367 """
368 368 raise NotImplementedError
369 369
370 370 def merge(self, state):
371 371 """merge currently-saved state with the new state."""
372 372 raise NotImplementedError
373 373
374 374 def push(self, opts):
375 375 """perform whatever action is analogous to 'hg push'
376 376
377 377 This may be a no-op on some systems.
378 378 """
379 379 raise NotImplementedError
380 380
381 381 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
382 382 return []
383 383
384 384 def status(self, rev2, **opts):
385 385 return [], [], [], [], [], [], []
386 386
387 387 def diff(self, ui, diffopts, node2, match, prefix, **opts):
388 388 pass
389 389
390 390 def outgoing(self, ui, dest, opts):
391 391 return 1
392 392
393 393 def incoming(self, ui, source, opts):
394 394 return 1
395 395
396 396 def files(self):
397 397 """return filename iterator"""
398 398 raise NotImplementedError
399 399
400 400 def filedata(self, name):
401 401 """return file data"""
402 402 raise NotImplementedError
403 403
404 404 def fileflags(self, name):
405 405 """return file flags"""
406 406 return ''
407 407
408 408 def archive(self, ui, archiver, prefix, match=None):
409 409 if match is not None:
410 410 files = [f for f in self.files() if match(f)]
411 411 else:
412 412 files = self.files()
413 413 total = len(files)
414 414 relpath = subrelpath(self)
415 415 ui.progress(_('archiving (%s)') % relpath, 0,
416 416 unit=_('files'), total=total)
417 417 for i, name in enumerate(files):
418 418 flags = self.fileflags(name)
419 419 mode = 'x' in flags and 0755 or 0644
420 420 symlink = 'l' in flags
421 421 archiver.addfile(os.path.join(prefix, self._path, name),
422 422 mode, symlink, self.filedata(name))
423 423 ui.progress(_('archiving (%s)') % relpath, i + 1,
424 424 unit=_('files'), total=total)
425 425 ui.progress(_('archiving (%s)') % relpath, None)
426 return total
426 427
427 428 def walk(self, match):
428 429 '''
429 430 walk recursively through the directory tree, finding all files
430 431 matched by the match function
431 432 '''
432 433 pass
433 434
434 435 def forget(self, ui, match, prefix):
435 436 return ([], [])
436 437
437 438 def revert(self, ui, substate, *pats, **opts):
438 439 ui.warn('%s: reverting %s subrepos is unsupported\n' \
439 440 % (substate[0], substate[2]))
440 441 return []
441 442
442 443 class hgsubrepo(abstractsubrepo):
443 444 def __init__(self, ctx, path, state):
444 445 self._path = path
445 446 self._state = state
446 447 r = ctx._repo
447 448 root = r.wjoin(path)
448 449 create = False
449 450 if not os.path.exists(os.path.join(root, '.hg')):
450 451 create = True
451 452 util.makedirs(root)
452 453 self._repo = hg.repository(r.baseui, root, create=create)
453 454 for s, k in [('ui', 'commitsubrepos')]:
454 455 v = r.ui.config(s, k)
455 456 if v:
456 457 self._repo.ui.setconfig(s, k, v)
457 458 self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
458 459 self._initrepo(r, state[0], create)
459 460
460 461 def storeclean(self, path):
461 462 clean = True
462 463 lock = self._repo.lock()
463 464 itercache = self._calcstorehash(path)
464 465 try:
465 466 for filehash in self._readstorehashcache(path):
466 467 if filehash != itercache.next():
467 468 clean = False
468 469 break
469 470 except StopIteration:
470 471 # the cached and current pull states have a different size
471 472 clean = False
472 473 if clean:
473 474 try:
474 475 itercache.next()
475 476 # the cached and current pull states have a different size
476 477 clean = False
477 478 except StopIteration:
478 479 pass
479 480 lock.release()
480 481 return clean
481 482
482 483 def _calcstorehash(self, remotepath):
483 484 '''calculate a unique "store hash"
484 485
485 486 This method is used to to detect when there are changes that may
486 487 require a push to a given remote path.'''
487 488 # sort the files that will be hashed in increasing (likely) file size
488 489 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
489 490 yield '# %s\n' % _expandedabspath(remotepath)
490 491 for relname in filelist:
491 492 absname = os.path.normpath(self._repo.join(relname))
492 493 yield '%s = %s\n' % (relname, _calcfilehash(absname))
493 494
494 495 def _getstorehashcachepath(self, remotepath):
495 496 '''get a unique path for the store hash cache'''
496 497 return self._repo.join(os.path.join(
497 498 'cache', 'storehash', _getstorehashcachename(remotepath)))
498 499
499 500 def _readstorehashcache(self, remotepath):
500 501 '''read the store hash cache for a given remote repository'''
501 502 cachefile = self._getstorehashcachepath(remotepath)
502 503 if not os.path.exists(cachefile):
503 504 return ''
504 505 fd = open(cachefile, 'r')
505 506 pullstate = fd.readlines()
506 507 fd.close()
507 508 return pullstate
508 509
509 510 def _cachestorehash(self, remotepath):
510 511 '''cache the current store hash
511 512
512 513 Each remote repo requires its own store hash cache, because a subrepo
513 514 store may be "clean" versus a given remote repo, but not versus another
514 515 '''
515 516 cachefile = self._getstorehashcachepath(remotepath)
516 517 lock = self._repo.lock()
517 518 storehash = list(self._calcstorehash(remotepath))
518 519 cachedir = os.path.dirname(cachefile)
519 520 if not os.path.exists(cachedir):
520 521 util.makedirs(cachedir, notindexed=True)
521 522 fd = open(cachefile, 'w')
522 523 fd.writelines(storehash)
523 524 fd.close()
524 525 lock.release()
525 526
526 527 @annotatesubrepoerror
527 528 def _initrepo(self, parentrepo, source, create):
528 529 self._repo._subparent = parentrepo
529 530 self._repo._subsource = source
530 531
531 532 if create:
532 533 fp = self._repo.opener("hgrc", "w", text=True)
533 534 fp.write('[paths]\n')
534 535
535 536 def addpathconfig(key, value):
536 537 if value:
537 538 fp.write('%s = %s\n' % (key, value))
538 539 self._repo.ui.setconfig('paths', key, value)
539 540
540 541 defpath = _abssource(self._repo, abort=False)
541 542 defpushpath = _abssource(self._repo, True, abort=False)
542 543 addpathconfig('default', defpath)
543 544 if defpath != defpushpath:
544 545 addpathconfig('default-push', defpushpath)
545 546 fp.close()
546 547
547 548 @annotatesubrepoerror
548 549 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
549 550 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
550 551 os.path.join(prefix, self._path), explicitonly)
551 552
552 553 @annotatesubrepoerror
553 554 def status(self, rev2, **opts):
554 555 try:
555 556 rev1 = self._state[1]
556 557 ctx1 = self._repo[rev1]
557 558 ctx2 = self._repo[rev2]
558 559 return self._repo.status(ctx1, ctx2, **opts)
559 560 except error.RepoLookupError, inst:
560 561 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
561 562 % (inst, subrelpath(self)))
562 563 return [], [], [], [], [], [], []
563 564
564 565 @annotatesubrepoerror
565 566 def diff(self, ui, diffopts, node2, match, prefix, **opts):
566 567 try:
567 568 node1 = node.bin(self._state[1])
568 569 # We currently expect node2 to come from substate and be
569 570 # in hex format
570 571 if node2 is not None:
571 572 node2 = node.bin(node2)
572 573 cmdutil.diffordiffstat(ui, self._repo, diffopts,
573 574 node1, node2, match,
574 575 prefix=posixpath.join(prefix, self._path),
575 576 listsubrepos=True, **opts)
576 577 except error.RepoLookupError, inst:
577 578 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
578 579 % (inst, subrelpath(self)))
579 580
580 581 @annotatesubrepoerror
581 582 def archive(self, ui, archiver, prefix, match=None):
582 583 self._get(self._state + ('hg',))
583 abstractsubrepo.archive(self, ui, archiver, prefix, match)
584
584 total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
585 585 rev = self._state[1]
586 586 ctx = self._repo[rev]
587 587 for subpath in ctx.substate:
588 588 s = subrepo(ctx, subpath)
589 589 submatch = matchmod.narrowmatcher(subpath, match)
590 s.archive(ui, archiver, os.path.join(prefix, self._path), submatch)
590 total += s.archive(
591 ui, archiver, os.path.join(prefix, self._path), submatch)
592 return total
591 593
592 594 @annotatesubrepoerror
593 595 def dirty(self, ignoreupdate=False):
594 596 r = self._state[1]
595 597 if r == '' and not ignoreupdate: # no state recorded
596 598 return True
597 599 w = self._repo[None]
598 600 if r != w.p1().hex() and not ignoreupdate:
599 601 # different version checked out
600 602 return True
601 603 return w.dirty() # working directory changed
602 604
603 605 def basestate(self):
604 606 return self._repo['.'].hex()
605 607
606 608 def checknested(self, path):
607 609 return self._repo._checknested(self._repo.wjoin(path))
608 610
609 611 @annotatesubrepoerror
610 612 def commit(self, text, user, date):
611 613 # don't bother committing in the subrepo if it's only been
612 614 # updated
613 615 if not self.dirty(True):
614 616 return self._repo['.'].hex()
615 617 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
616 618 n = self._repo.commit(text, user, date)
617 619 if not n:
618 620 return self._repo['.'].hex() # different version checked out
619 621 return node.hex(n)
620 622
621 623 @annotatesubrepoerror
622 624 def remove(self):
623 625 # we can't fully delete the repository as it may contain
624 626 # local-only history
625 627 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
626 628 hg.clean(self._repo, node.nullid, False)
627 629
628 630 def _get(self, state):
629 631 source, revision, kind = state
630 632 if revision not in self._repo:
631 633 self._repo._subsource = source
632 634 srcurl = _abssource(self._repo)
633 635 other = hg.peer(self._repo, {}, srcurl)
634 636 if len(self._repo) == 0:
635 637 self._repo.ui.status(_('cloning subrepo %s from %s\n')
636 638 % (subrelpath(self), srcurl))
637 639 parentrepo = self._repo._subparent
638 640 shutil.rmtree(self._repo.path)
639 641 other, cloned = hg.clone(self._repo._subparent.baseui, {},
640 642 other, self._repo.root,
641 643 update=False)
642 644 self._repo = cloned.local()
643 645 self._initrepo(parentrepo, source, create=True)
644 646 self._cachestorehash(srcurl)
645 647 else:
646 648 self._repo.ui.status(_('pulling subrepo %s from %s\n')
647 649 % (subrelpath(self), srcurl))
648 650 cleansub = self.storeclean(srcurl)
649 651 remotebookmarks = other.listkeys('bookmarks')
650 652 self._repo.pull(other)
651 653 bookmarks.updatefromremote(self._repo.ui, self._repo,
652 654 remotebookmarks, srcurl)
653 655 if cleansub:
654 656 # keep the repo clean after pull
655 657 self._cachestorehash(srcurl)
656 658
657 659 @annotatesubrepoerror
658 660 def get(self, state, overwrite=False):
659 661 self._get(state)
660 662 source, revision, kind = state
661 663 self._repo.ui.debug("getting subrepo %s\n" % self._path)
662 664 hg.updaterepo(self._repo, revision, overwrite)
663 665
664 666 @annotatesubrepoerror
665 667 def merge(self, state):
666 668 self._get(state)
667 669 cur = self._repo['.']
668 670 dst = self._repo[state[1]]
669 671 anc = dst.ancestor(cur)
670 672
671 673 def mergefunc():
672 674 if anc == cur and dst.branch() == cur.branch():
673 675 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
674 676 hg.update(self._repo, state[1])
675 677 elif anc == dst:
676 678 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
677 679 else:
678 680 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
679 681 hg.merge(self._repo, state[1], remind=False)
680 682
681 683 wctx = self._repo[None]
682 684 if self.dirty():
683 685 if anc != dst:
684 686 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
685 687 mergefunc()
686 688 else:
687 689 mergefunc()
688 690 else:
689 691 mergefunc()
690 692
691 693 @annotatesubrepoerror
692 694 def push(self, opts):
693 695 force = opts.get('force')
694 696 newbranch = opts.get('new_branch')
695 697 ssh = opts.get('ssh')
696 698
697 699 # push subrepos depth-first for coherent ordering
698 700 c = self._repo['']
699 701 subs = c.substate # only repos that are committed
700 702 for s in sorted(subs):
701 703 if c.sub(s).push(opts) == 0:
702 704 return False
703 705
704 706 dsturl = _abssource(self._repo, True)
705 707 if not force:
706 708 if self.storeclean(dsturl):
707 709 self._repo.ui.status(
708 710 _('no changes made to subrepo %s since last push to %s\n')
709 711 % (subrelpath(self), dsturl))
710 712 return None
711 713 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
712 714 (subrelpath(self), dsturl))
713 715 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
714 716 res = self._repo.push(other, force, newbranch=newbranch)
715 717
716 718 # the repo is now clean
717 719 self._cachestorehash(dsturl)
718 720 return res
719 721
720 722 @annotatesubrepoerror
721 723 def outgoing(self, ui, dest, opts):
722 724 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
723 725
724 726 @annotatesubrepoerror
725 727 def incoming(self, ui, source, opts):
726 728 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
727 729
728 730 @annotatesubrepoerror
729 731 def files(self):
730 732 rev = self._state[1]
731 733 ctx = self._repo[rev]
732 734 return ctx.manifest()
733 735
734 736 def filedata(self, name):
735 737 rev = self._state[1]
736 738 return self._repo[rev][name].data()
737 739
738 740 def fileflags(self, name):
739 741 rev = self._state[1]
740 742 ctx = self._repo[rev]
741 743 return ctx.flags(name)
742 744
743 745 def walk(self, match):
744 746 ctx = self._repo[None]
745 747 return ctx.walk(match)
746 748
747 749 @annotatesubrepoerror
748 750 def forget(self, ui, match, prefix):
749 751 return cmdutil.forget(ui, self._repo, match,
750 752 os.path.join(prefix, self._path), True)
751 753
752 754 @annotatesubrepoerror
753 755 def revert(self, ui, substate, *pats, **opts):
754 756 # reverting a subrepo is a 2 step process:
755 757 # 1. if the no_backup is not set, revert all modified
756 758 # files inside the subrepo
757 759 # 2. update the subrepo to the revision specified in
758 760 # the corresponding substate dictionary
759 761 ui.status(_('reverting subrepo %s\n') % substate[0])
760 762 if not opts.get('no_backup'):
761 763 # Revert all files on the subrepo, creating backups
762 764 # Note that this will not recursively revert subrepos
763 765 # We could do it if there was a set:subrepos() predicate
764 766 opts = opts.copy()
765 767 opts['date'] = None
766 768 opts['rev'] = substate[1]
767 769
768 770 pats = []
769 771 if not opts.get('all'):
770 772 pats = ['set:modified()']
771 773 self.filerevert(ui, *pats, **opts)
772 774
773 775 # Update the repo to the revision specified in the given substate
774 776 self.get(substate, overwrite=True)
775 777
776 778 def filerevert(self, ui, *pats, **opts):
777 779 ctx = self._repo[opts['rev']]
778 780 parents = self._repo.dirstate.parents()
779 781 if opts.get('all'):
780 782 pats = ['set:modified()']
781 783 else:
782 784 pats = []
783 785 cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
784 786
785 787 class svnsubrepo(abstractsubrepo):
786 788 def __init__(self, ctx, path, state):
787 789 self._path = path
788 790 self._state = state
789 791 self._ctx = ctx
790 792 self._ui = ctx._repo.ui
791 793 self._exe = util.findexe('svn')
792 794 if not self._exe:
793 795 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
794 796 % self._path)
795 797
796 798 def _svncommand(self, commands, filename='', failok=False):
797 799 cmd = [self._exe]
798 800 extrakw = {}
799 801 if not self._ui.interactive():
800 802 # Making stdin be a pipe should prevent svn from behaving
801 803 # interactively even if we can't pass --non-interactive.
802 804 extrakw['stdin'] = subprocess.PIPE
803 805 # Starting in svn 1.5 --non-interactive is a global flag
804 806 # instead of being per-command, but we need to support 1.4 so
805 807 # we have to be intelligent about what commands take
806 808 # --non-interactive.
807 809 if commands[0] in ('update', 'checkout', 'commit'):
808 810 cmd.append('--non-interactive')
809 811 cmd.extend(commands)
810 812 if filename is not None:
811 813 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
812 814 cmd.append(path)
813 815 env = dict(os.environ)
814 816 # Avoid localized output, preserve current locale for everything else.
815 817 lc_all = env.get('LC_ALL')
816 818 if lc_all:
817 819 env['LANG'] = lc_all
818 820 del env['LC_ALL']
819 821 env['LC_MESSAGES'] = 'C'
820 822 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
821 823 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
822 824 universal_newlines=True, env=env, **extrakw)
823 825 stdout, stderr = p.communicate()
824 826 stderr = stderr.strip()
825 827 if not failok:
826 828 if p.returncode:
827 829 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
828 830 if stderr:
829 831 self._ui.warn(stderr + '\n')
830 832 return stdout, stderr
831 833
832 834 @propertycache
833 835 def _svnversion(self):
834 836 output, err = self._svncommand(['--version', '--quiet'], filename=None)
835 837 m = re.search(r'^(\d+)\.(\d+)', output)
836 838 if not m:
837 839 raise util.Abort(_('cannot retrieve svn tool version'))
838 840 return (int(m.group(1)), int(m.group(2)))
839 841
840 842 def _wcrevs(self):
841 843 # Get the working directory revision as well as the last
842 844 # commit revision so we can compare the subrepo state with
843 845 # both. We used to store the working directory one.
844 846 output, err = self._svncommand(['info', '--xml'])
845 847 doc = xml.dom.minidom.parseString(output)
846 848 entries = doc.getElementsByTagName('entry')
847 849 lastrev, rev = '0', '0'
848 850 if entries:
849 851 rev = str(entries[0].getAttribute('revision')) or '0'
850 852 commits = entries[0].getElementsByTagName('commit')
851 853 if commits:
852 854 lastrev = str(commits[0].getAttribute('revision')) or '0'
853 855 return (lastrev, rev)
854 856
855 857 def _wcrev(self):
856 858 return self._wcrevs()[0]
857 859
858 860 def _wcchanged(self):
859 861 """Return (changes, extchanges, missing) where changes is True
860 862 if the working directory was changed, extchanges is
861 863 True if any of these changes concern an external entry and missing
862 864 is True if any change is a missing entry.
863 865 """
864 866 output, err = self._svncommand(['status', '--xml'])
865 867 externals, changes, missing = [], [], []
866 868 doc = xml.dom.minidom.parseString(output)
867 869 for e in doc.getElementsByTagName('entry'):
868 870 s = e.getElementsByTagName('wc-status')
869 871 if not s:
870 872 continue
871 873 item = s[0].getAttribute('item')
872 874 props = s[0].getAttribute('props')
873 875 path = e.getAttribute('path')
874 876 if item == 'external':
875 877 externals.append(path)
876 878 elif item == 'missing':
877 879 missing.append(path)
878 880 if (item not in ('', 'normal', 'unversioned', 'external')
879 881 or props not in ('', 'none', 'normal')):
880 882 changes.append(path)
881 883 for path in changes:
882 884 for ext in externals:
883 885 if path == ext or path.startswith(ext + os.sep):
884 886 return True, True, bool(missing)
885 887 return bool(changes), False, bool(missing)
886 888
887 889 def dirty(self, ignoreupdate=False):
888 890 if not self._wcchanged()[0]:
889 891 if self._state[1] in self._wcrevs() or ignoreupdate:
890 892 return False
891 893 return True
892 894
893 895 def basestate(self):
894 896 lastrev, rev = self._wcrevs()
895 897 if lastrev != rev:
896 898 # Last committed rev is not the same than rev. We would
897 899 # like to take lastrev but we do not know if the subrepo
898 900 # URL exists at lastrev. Test it and fallback to rev it
899 901 # is not there.
900 902 try:
901 903 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
902 904 return lastrev
903 905 except error.Abort:
904 906 pass
905 907 return rev
906 908
907 909 @annotatesubrepoerror
908 910 def commit(self, text, user, date):
909 911 # user and date are out of our hands since svn is centralized
910 912 changed, extchanged, missing = self._wcchanged()
911 913 if not changed:
912 914 return self.basestate()
913 915 if extchanged:
914 916 # Do not try to commit externals
915 917 raise util.Abort(_('cannot commit svn externals'))
916 918 if missing:
917 919 # svn can commit with missing entries but aborting like hg
918 920 # seems a better approach.
919 921 raise util.Abort(_('cannot commit missing svn entries'))
920 922 commitinfo, err = self._svncommand(['commit', '-m', text])
921 923 self._ui.status(commitinfo)
922 924 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
923 925 if not newrev:
924 926 if not commitinfo.strip():
925 927 # Sometimes, our definition of "changed" differs from
926 928 # svn one. For instance, svn ignores missing files
927 929 # when committing. If there are only missing files, no
928 930 # commit is made, no output and no error code.
929 931 raise util.Abort(_('failed to commit svn changes'))
930 932 raise util.Abort(commitinfo.splitlines()[-1])
931 933 newrev = newrev.groups()[0]
932 934 self._ui.status(self._svncommand(['update', '-r', newrev])[0])
933 935 return newrev
934 936
935 937 @annotatesubrepoerror
936 938 def remove(self):
937 939 if self.dirty():
938 940 self._ui.warn(_('not removing repo %s because '
939 941 'it has changes.\n' % self._path))
940 942 return
941 943 self._ui.note(_('removing subrepo %s\n') % self._path)
942 944
943 945 def onerror(function, path, excinfo):
944 946 if function is not os.remove:
945 947 raise
946 948 # read-only files cannot be unlinked under Windows
947 949 s = os.stat(path)
948 950 if (s.st_mode & stat.S_IWRITE) != 0:
949 951 raise
950 952 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
951 953 os.remove(path)
952 954
953 955 path = self._ctx._repo.wjoin(self._path)
954 956 shutil.rmtree(path, onerror=onerror)
955 957 try:
956 958 os.removedirs(os.path.dirname(path))
957 959 except OSError:
958 960 pass
959 961
960 962 @annotatesubrepoerror
961 963 def get(self, state, overwrite=False):
962 964 if overwrite:
963 965 self._svncommand(['revert', '--recursive'])
964 966 args = ['checkout']
965 967 if self._svnversion >= (1, 5):
966 968 args.append('--force')
967 969 # The revision must be specified at the end of the URL to properly
968 970 # update to a directory which has since been deleted and recreated.
969 971 args.append('%s@%s' % (state[0], state[1]))
970 972 status, err = self._svncommand(args, failok=True)
971 973 if not re.search('Checked out revision [0-9]+.', status):
972 974 if ('is already a working copy for a different URL' in err
973 975 and (self._wcchanged()[:2] == (False, False))):
974 976 # obstructed but clean working copy, so just blow it away.
975 977 self.remove()
976 978 self.get(state, overwrite=False)
977 979 return
978 980 raise util.Abort((status or err).splitlines()[-1])
979 981 self._ui.status(status)
980 982
981 983 @annotatesubrepoerror
982 984 def merge(self, state):
983 985 old = self._state[1]
984 986 new = state[1]
985 987 wcrev = self._wcrev()
986 988 if new != wcrev:
987 989 dirty = old == wcrev or self._wcchanged()[0]
988 990 if _updateprompt(self._ui, self, dirty, wcrev, new):
989 991 self.get(state, False)
990 992
991 993 def push(self, opts):
992 994 # push is a no-op for SVN
993 995 return True
994 996
995 997 @annotatesubrepoerror
996 998 def files(self):
997 999 output = self._svncommand(['list', '--recursive', '--xml'])[0]
998 1000 doc = xml.dom.minidom.parseString(output)
999 1001 paths = []
1000 1002 for e in doc.getElementsByTagName('entry'):
1001 1003 kind = str(e.getAttribute('kind'))
1002 1004 if kind != 'file':
1003 1005 continue
1004 1006 name = ''.join(c.data for c
1005 1007 in e.getElementsByTagName('name')[0].childNodes
1006 1008 if c.nodeType == c.TEXT_NODE)
1007 1009 paths.append(name.encode('utf-8'))
1008 1010 return paths
1009 1011
1010 1012 def filedata(self, name):
1011 1013 return self._svncommand(['cat'], name)[0]
1012 1014
1013 1015
1014 1016 class gitsubrepo(abstractsubrepo):
1015 1017 def __init__(self, ctx, path, state):
1016 1018 self._state = state
1017 1019 self._ctx = ctx
1018 1020 self._path = path
1019 1021 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1020 1022 self._abspath = ctx._repo.wjoin(path)
1021 1023 self._subparent = ctx._repo
1022 1024 self._ui = ctx._repo.ui
1023 1025 self._ensuregit()
1024 1026
1025 1027 def _ensuregit(self):
1026 1028 try:
1027 1029 self._gitexecutable = 'git'
1028 1030 out, err = self._gitnodir(['--version'])
1029 1031 except OSError, e:
1030 1032 if e.errno != 2 or os.name != 'nt':
1031 1033 raise
1032 1034 self._gitexecutable = 'git.cmd'
1033 1035 out, err = self._gitnodir(['--version'])
1034 1036 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1035 1037 if not m:
1036 1038 self._ui.warn(_('cannot retrieve git version'))
1037 1039 return
1038 1040 version = (int(m.group(1)), m.group(2), m.group(3))
1039 1041 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1040 1042 # despite the docstring comment. For now, error on 1.4.0, warn on
1041 1043 # 1.5.0 but attempt to continue.
1042 1044 if version < (1, 5, 0):
1043 1045 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1044 1046 elif version < (1, 6, 0):
1045 1047 self._ui.warn(_('git subrepo requires at least 1.6.0 or later'))
1046 1048
1047 1049 def _gitcommand(self, commands, env=None, stream=False):
1048 1050 return self._gitdir(commands, env=env, stream=stream)[0]
1049 1051
1050 1052 def _gitdir(self, commands, env=None, stream=False):
1051 1053 return self._gitnodir(commands, env=env, stream=stream,
1052 1054 cwd=self._abspath)
1053 1055
1054 1056 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1055 1057 """Calls the git command
1056 1058
1057 1059 The methods tries to call the git command. versions prior to 1.6.0
1058 1060 are not supported and very probably fail.
1059 1061 """
1060 1062 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1061 1063 # unless ui.quiet is set, print git's stderr,
1062 1064 # which is mostly progress and useful info
1063 1065 errpipe = None
1064 1066 if self._ui.quiet:
1065 1067 errpipe = open(os.devnull, 'w')
1066 1068 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1067 1069 cwd=cwd, env=env, close_fds=util.closefds,
1068 1070 stdout=subprocess.PIPE, stderr=errpipe)
1069 1071 if stream:
1070 1072 return p.stdout, None
1071 1073
1072 1074 retdata = p.stdout.read().strip()
1073 1075 # wait for the child to exit to avoid race condition.
1074 1076 p.wait()
1075 1077
1076 1078 if p.returncode != 0 and p.returncode != 1:
1077 1079 # there are certain error codes that are ok
1078 1080 command = commands[0]
1079 1081 if command in ('cat-file', 'symbolic-ref'):
1080 1082 return retdata, p.returncode
1081 1083 # for all others, abort
1082 1084 raise util.Abort('git %s error %d in %s' %
1083 1085 (command, p.returncode, self._relpath))
1084 1086
1085 1087 return retdata, p.returncode
1086 1088
1087 1089 def _gitmissing(self):
1088 1090 return not os.path.exists(os.path.join(self._abspath, '.git'))
1089 1091
1090 1092 def _gitstate(self):
1091 1093 return self._gitcommand(['rev-parse', 'HEAD'])
1092 1094
1093 1095 def _gitcurrentbranch(self):
1094 1096 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1095 1097 if err:
1096 1098 current = None
1097 1099 return current
1098 1100
1099 1101 def _gitremote(self, remote):
1100 1102 out = self._gitcommand(['remote', 'show', '-n', remote])
1101 1103 line = out.split('\n')[1]
1102 1104 i = line.index('URL: ') + len('URL: ')
1103 1105 return line[i:]
1104 1106
1105 1107 def _githavelocally(self, revision):
1106 1108 out, code = self._gitdir(['cat-file', '-e', revision])
1107 1109 return code == 0
1108 1110
1109 1111 def _gitisancestor(self, r1, r2):
1110 1112 base = self._gitcommand(['merge-base', r1, r2])
1111 1113 return base == r1
1112 1114
1113 1115 def _gitisbare(self):
1114 1116 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1115 1117
1116 1118 def _gitupdatestat(self):
1117 1119 """This must be run before git diff-index.
1118 1120 diff-index only looks at changes to file stat;
1119 1121 this command looks at file contents and updates the stat."""
1120 1122 self._gitcommand(['update-index', '-q', '--refresh'])
1121 1123
1122 1124 def _gitbranchmap(self):
1123 1125 '''returns 2 things:
1124 1126 a map from git branch to revision
1125 1127 a map from revision to branches'''
1126 1128 branch2rev = {}
1127 1129 rev2branch = {}
1128 1130
1129 1131 out = self._gitcommand(['for-each-ref', '--format',
1130 1132 '%(objectname) %(refname)'])
1131 1133 for line in out.split('\n'):
1132 1134 revision, ref = line.split(' ')
1133 1135 if (not ref.startswith('refs/heads/') and
1134 1136 not ref.startswith('refs/remotes/')):
1135 1137 continue
1136 1138 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1137 1139 continue # ignore remote/HEAD redirects
1138 1140 branch2rev[ref] = revision
1139 1141 rev2branch.setdefault(revision, []).append(ref)
1140 1142 return branch2rev, rev2branch
1141 1143
1142 1144 def _gittracking(self, branches):
1143 1145 'return map of remote branch to local tracking branch'
1144 1146 # assumes no more than one local tracking branch for each remote
1145 1147 tracking = {}
1146 1148 for b in branches:
1147 1149 if b.startswith('refs/remotes/'):
1148 1150 continue
1149 1151 bname = b.split('/', 2)[2]
1150 1152 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1151 1153 if remote:
1152 1154 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1153 1155 tracking['refs/remotes/%s/%s' %
1154 1156 (remote, ref.split('/', 2)[2])] = b
1155 1157 return tracking
1156 1158
1157 1159 def _abssource(self, source):
1158 1160 if '://' not in source:
1159 1161 # recognize the scp syntax as an absolute source
1160 1162 colon = source.find(':')
1161 1163 if colon != -1 and '/' not in source[:colon]:
1162 1164 return source
1163 1165 self._subsource = source
1164 1166 return _abssource(self)
1165 1167
1166 1168 def _fetch(self, source, revision):
1167 1169 if self._gitmissing():
1168 1170 source = self._abssource(source)
1169 1171 self._ui.status(_('cloning subrepo %s from %s\n') %
1170 1172 (self._relpath, source))
1171 1173 self._gitnodir(['clone', source, self._abspath])
1172 1174 if self._githavelocally(revision):
1173 1175 return
1174 1176 self._ui.status(_('pulling subrepo %s from %s\n') %
1175 1177 (self._relpath, self._gitremote('origin')))
1176 1178 # try only origin: the originally cloned repo
1177 1179 self._gitcommand(['fetch'])
1178 1180 if not self._githavelocally(revision):
1179 1181 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1180 1182 (revision, self._relpath))
1181 1183
1182 1184 @annotatesubrepoerror
1183 1185 def dirty(self, ignoreupdate=False):
1184 1186 if self._gitmissing():
1185 1187 return self._state[1] != ''
1186 1188 if self._gitisbare():
1187 1189 return True
1188 1190 if not ignoreupdate and self._state[1] != self._gitstate():
1189 1191 # different version checked out
1190 1192 return True
1191 1193 # check for staged changes or modified files; ignore untracked files
1192 1194 self._gitupdatestat()
1193 1195 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1194 1196 return code == 1
1195 1197
1196 1198 def basestate(self):
1197 1199 return self._gitstate()
1198 1200
1199 1201 @annotatesubrepoerror
1200 1202 def get(self, state, overwrite=False):
1201 1203 source, revision, kind = state
1202 1204 if not revision:
1203 1205 self.remove()
1204 1206 return
1205 1207 self._fetch(source, revision)
1206 1208 # if the repo was set to be bare, unbare it
1207 1209 if self._gitisbare():
1208 1210 self._gitcommand(['config', 'core.bare', 'false'])
1209 1211 if self._gitstate() == revision:
1210 1212 self._gitcommand(['reset', '--hard', 'HEAD'])
1211 1213 return
1212 1214 elif self._gitstate() == revision:
1213 1215 if overwrite:
1214 1216 # first reset the index to unmark new files for commit, because
1215 1217 # reset --hard will otherwise throw away files added for commit,
1216 1218 # not just unmark them.
1217 1219 self._gitcommand(['reset', 'HEAD'])
1218 1220 self._gitcommand(['reset', '--hard', 'HEAD'])
1219 1221 return
1220 1222 branch2rev, rev2branch = self._gitbranchmap()
1221 1223
1222 1224 def checkout(args):
1223 1225 cmd = ['checkout']
1224 1226 if overwrite:
1225 1227 # first reset the index to unmark new files for commit, because
1226 1228 # the -f option will otherwise throw away files added for
1227 1229 # commit, not just unmark them.
1228 1230 self._gitcommand(['reset', 'HEAD'])
1229 1231 cmd.append('-f')
1230 1232 self._gitcommand(cmd + args)
1231 1233
1232 1234 def rawcheckout():
1233 1235 # no branch to checkout, check it out with no branch
1234 1236 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1235 1237 self._relpath)
1236 1238 self._ui.warn(_('check out a git branch if you intend '
1237 1239 'to make changes\n'))
1238 1240 checkout(['-q', revision])
1239 1241
1240 1242 if revision not in rev2branch:
1241 1243 rawcheckout()
1242 1244 return
1243 1245 branches = rev2branch[revision]
1244 1246 firstlocalbranch = None
1245 1247 for b in branches:
1246 1248 if b == 'refs/heads/master':
1247 1249 # master trumps all other branches
1248 1250 checkout(['refs/heads/master'])
1249 1251 return
1250 1252 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1251 1253 firstlocalbranch = b
1252 1254 if firstlocalbranch:
1253 1255 checkout([firstlocalbranch])
1254 1256 return
1255 1257
1256 1258 tracking = self._gittracking(branch2rev.keys())
1257 1259 # choose a remote branch already tracked if possible
1258 1260 remote = branches[0]
1259 1261 if remote not in tracking:
1260 1262 for b in branches:
1261 1263 if b in tracking:
1262 1264 remote = b
1263 1265 break
1264 1266
1265 1267 if remote not in tracking:
1266 1268 # create a new local tracking branch
1267 1269 local = remote.split('/', 2)[2]
1268 1270 checkout(['-b', local, remote])
1269 1271 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1270 1272 # When updating to a tracked remote branch,
1271 1273 # if the local tracking branch is downstream of it,
1272 1274 # a normal `git pull` would have performed a "fast-forward merge"
1273 1275 # which is equivalent to updating the local branch to the remote.
1274 1276 # Since we are only looking at branching at update, we need to
1275 1277 # detect this situation and perform this action lazily.
1276 1278 if tracking[remote] != self._gitcurrentbranch():
1277 1279 checkout([tracking[remote]])
1278 1280 self._gitcommand(['merge', '--ff', remote])
1279 1281 else:
1280 1282 # a real merge would be required, just checkout the revision
1281 1283 rawcheckout()
1282 1284
1283 1285 @annotatesubrepoerror
1284 1286 def commit(self, text, user, date):
1285 1287 if self._gitmissing():
1286 1288 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1287 1289 cmd = ['commit', '-a', '-m', text]
1288 1290 env = os.environ.copy()
1289 1291 if user:
1290 1292 cmd += ['--author', user]
1291 1293 if date:
1292 1294 # git's date parser silently ignores when seconds < 1e9
1293 1295 # convert to ISO8601
1294 1296 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1295 1297 '%Y-%m-%dT%H:%M:%S %1%2')
1296 1298 self._gitcommand(cmd, env=env)
1297 1299 # make sure commit works otherwise HEAD might not exist under certain
1298 1300 # circumstances
1299 1301 return self._gitstate()
1300 1302
1301 1303 @annotatesubrepoerror
1302 1304 def merge(self, state):
1303 1305 source, revision, kind = state
1304 1306 self._fetch(source, revision)
1305 1307 base = self._gitcommand(['merge-base', revision, self._state[1]])
1306 1308 self._gitupdatestat()
1307 1309 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1308 1310
1309 1311 def mergefunc():
1310 1312 if base == revision:
1311 1313 self.get(state) # fast forward merge
1312 1314 elif base != self._state[1]:
1313 1315 self._gitcommand(['merge', '--no-commit', revision])
1314 1316
1315 1317 if self.dirty():
1316 1318 if self._gitstate() != revision:
1317 1319 dirty = self._gitstate() == self._state[1] or code != 0
1318 1320 if _updateprompt(self._ui, self, dirty,
1319 1321 self._state[1][:7], revision[:7]):
1320 1322 mergefunc()
1321 1323 else:
1322 1324 mergefunc()
1323 1325
1324 1326 @annotatesubrepoerror
1325 1327 def push(self, opts):
1326 1328 force = opts.get('force')
1327 1329
1328 1330 if not self._state[1]:
1329 1331 return True
1330 1332 if self._gitmissing():
1331 1333 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1332 1334 # if a branch in origin contains the revision, nothing to do
1333 1335 branch2rev, rev2branch = self._gitbranchmap()
1334 1336 if self._state[1] in rev2branch:
1335 1337 for b in rev2branch[self._state[1]]:
1336 1338 if b.startswith('refs/remotes/origin/'):
1337 1339 return True
1338 1340 for b, revision in branch2rev.iteritems():
1339 1341 if b.startswith('refs/remotes/origin/'):
1340 1342 if self._gitisancestor(self._state[1], revision):
1341 1343 return True
1342 1344 # otherwise, try to push the currently checked out branch
1343 1345 cmd = ['push']
1344 1346 if force:
1345 1347 cmd.append('--force')
1346 1348
1347 1349 current = self._gitcurrentbranch()
1348 1350 if current:
1349 1351 # determine if the current branch is even useful
1350 1352 if not self._gitisancestor(self._state[1], current):
1351 1353 self._ui.warn(_('unrelated git branch checked out '
1352 1354 'in subrepo %s\n') % self._relpath)
1353 1355 return False
1354 1356 self._ui.status(_('pushing branch %s of subrepo %s\n') %
1355 1357 (current.split('/', 2)[2], self._relpath))
1356 1358 self._gitcommand(cmd + ['origin', current])
1357 1359 return True
1358 1360 else:
1359 1361 self._ui.warn(_('no branch checked out in subrepo %s\n'
1360 1362 'cannot push revision %s\n') %
1361 1363 (self._relpath, self._state[1]))
1362 1364 return False
1363 1365
1364 1366 @annotatesubrepoerror
1365 1367 def remove(self):
1366 1368 if self._gitmissing():
1367 1369 return
1368 1370 if self.dirty():
1369 1371 self._ui.warn(_('not removing repo %s because '
1370 1372 'it has changes.\n') % self._relpath)
1371 1373 return
1372 1374 # we can't fully delete the repository as it may contain
1373 1375 # local-only history
1374 1376 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1375 1377 self._gitcommand(['config', 'core.bare', 'true'])
1376 1378 for f in os.listdir(self._abspath):
1377 1379 if f == '.git':
1378 1380 continue
1379 1381 path = os.path.join(self._abspath, f)
1380 1382 if os.path.isdir(path) and not os.path.islink(path):
1381 1383 shutil.rmtree(path)
1382 1384 else:
1383 1385 os.remove(path)
1384 1386
1385 1387 def archive(self, ui, archiver, prefix, match=None):
1388 total = 0
1386 1389 source, revision = self._state
1387 1390 if not revision:
1388 return
1391 return total
1389 1392 self._fetch(source, revision)
1390 1393
1391 1394 # Parse git's native archive command.
1392 1395 # This should be much faster than manually traversing the trees
1393 1396 # and objects with many subprocess calls.
1394 1397 tarstream = self._gitcommand(['archive', revision], stream=True)
1395 1398 tar = tarfile.open(fileobj=tarstream, mode='r|')
1396 1399 relpath = subrelpath(self)
1397 1400 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1398 1401 for i, info in enumerate(tar):
1399 1402 if info.isdir():
1400 1403 continue
1401 1404 if match and not match(info.name):
1402 1405 continue
1403 1406 if info.issym():
1404 1407 data = info.linkname
1405 1408 else:
1406 1409 data = tar.extractfile(info).read()
1407 1410 archiver.addfile(os.path.join(prefix, self._path, info.name),
1408 1411 info.mode, info.issym(), data)
1412 total += 1
1409 1413 ui.progress(_('archiving (%s)') % relpath, i + 1,
1410 1414 unit=_('files'))
1411 1415 ui.progress(_('archiving (%s)') % relpath, None)
1416 return total
1412 1417
1413 1418
1414 1419 @annotatesubrepoerror
1415 1420 def status(self, rev2, **opts):
1416 1421 rev1 = self._state[1]
1417 1422 if self._gitmissing() or not rev1:
1418 1423 # if the repo is missing, return no results
1419 1424 return [], [], [], [], [], [], []
1420 1425 modified, added, removed = [], [], []
1421 1426 self._gitupdatestat()
1422 1427 if rev2:
1423 1428 command = ['diff-tree', rev1, rev2]
1424 1429 else:
1425 1430 command = ['diff-index', rev1]
1426 1431 out = self._gitcommand(command)
1427 1432 for line in out.split('\n'):
1428 1433 tab = line.find('\t')
1429 1434 if tab == -1:
1430 1435 continue
1431 1436 status, f = line[tab - 1], line[tab + 1:]
1432 1437 if status == 'M':
1433 1438 modified.append(f)
1434 1439 elif status == 'A':
1435 1440 added.append(f)
1436 1441 elif status == 'D':
1437 1442 removed.append(f)
1438 1443
1439 1444 deleted = unknown = ignored = clean = []
1440 1445 return modified, added, removed, deleted, unknown, ignored, clean
1441 1446
1442 1447 types = {
1443 1448 'hg': hgsubrepo,
1444 1449 'svn': svnsubrepo,
1445 1450 'git': gitsubrepo,
1446 1451 }
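The gitsubrepo.archive() hunk above now counts every file it writes and returns that total instead of returning nothing. Below is a minimal caller-side sketch of how such a count can drive the abort that the new test hunk further down expects; the wrapper name and its arguments are assumptions for illustration, not part of this changeset:

    # Sketch only: archive a subrepo and abort when the pattern matched nothing,
    # mirroring the 'no files match the archive pattern' expectation tested below.
    from mercurial import error
    from mercurial.i18n import _

    def archivesubrepo(sub, ui, archiver, prefix, match=None):
        # sub.archive() has the signature shown in the hunk above and now
        # returns the number of files it added to the archive
        total = sub.archive(ui, archiver, prefix, match)
        if total == 0:
            raise error.Abort(_('no files match the archive pattern'))
        return total
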
@@ -1,320 +1,330 b''
1 1 $ "$TESTDIR/hghave" serve || exit 80
2 2
3 3 $ hg init test
4 4 $ cd test
5 5 $ echo foo>foo
6 6 $ hg commit -Am 1 -d '1 0'
7 7 adding foo
8 8 $ echo bar>bar
9 9 $ hg commit -Am 2 -d '2 0'
10 10 adding bar
11 11 $ mkdir baz
12 12 $ echo bletch>baz/bletch
13 13 $ hg commit -Am 3 -d '1000000000 0'
14 14 adding baz/bletch
15 15 $ echo "[web]" >> .hg/hgrc
16 16 $ echo "name = test-archive" >> .hg/hgrc
17 17 $ cp .hg/hgrc .hg/hgrc-base
18 18 > test_archtype() {
19 19 > echo "allow_archive = $1" >> .hg/hgrc
20 20 > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
21 21 > cat hg.pid >> $DAEMON_PIDS
22 22 > echo % $1 allowed should give 200
23 23 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$2" | head -n 1
24 24 > echo % $3 and $4 disallowed should both give 403
25 25 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$3" | head -n 1
26 26 > "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$4" | head -n 1
27 27 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
28 28 > cat errors.log
29 29 > cp .hg/hgrc-base .hg/hgrc
30 30 > }
31 31
32 32 check http return codes
33 33
34 34 $ test_archtype gz tar.gz tar.bz2 zip
35 35 % gz allowed should give 200
36 36 200 Script output follows
37 37 % tar.bz2 and zip disallowed should both give 403
38 38 403 Archive type not allowed: bz2
39 39 403 Archive type not allowed: zip
40 40 $ test_archtype bz2 tar.bz2 zip tar.gz
41 41 % bz2 allowed should give 200
42 42 200 Script output follows
43 43 % zip and tar.gz disallowed should both give 403
44 44 403 Archive type not allowed: zip
45 45 403 Archive type not allowed: gz
46 46 $ test_archtype zip zip tar.gz tar.bz2
47 47 % zip allowed should give 200
48 48 200 Script output follows
49 49 % tar.gz and tar.bz2 disallowed should both give 403
50 50 403 Archive type not allowed: gz
51 51 403 Archive type not allowed: bz2
52 52
53 53 $ echo "allow_archive = gz bz2 zip" >> .hg/hgrc
54 54 $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
55 55 $ cat hg.pid >> $DAEMON_PIDS
56 56
57 57 invalid arch type should give 404
58 58
59 59 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.invalid" | head -n 1
60 60 404 Unsupported archive type: None
61 61
62 62 $ TIP=`hg id -v | cut -f1 -d' '`
63 63 $ QTIP=`hg id -q`
64 64 $ cat > getarchive.py <<EOF
65 65 > import os, sys, urllib2
66 66 > try:
67 67 > # Set stdout to binary mode for win32 platforms
68 68 > import msvcrt
69 69 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
70 70 > except ImportError:
71 71 > pass
72 72 > if len(sys.argv) <= 3:
73 73 > node, archive = sys.argv[1:]
74 74 > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive)
75 75 > else:
76 76 > node, archive, file = sys.argv[1:]
77 77 > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file)
78 78 > try:
79 79 > f = urllib2.urlopen('http://127.0.0.1:%s/?%s'
80 80 > % (os.environ['HGPORT'], requeststr))
81 81 > sys.stdout.write(f.read())
82 82 > except urllib2.HTTPError, e:
83 83 > sys.stderr.write(str(e) + '\n')
84 84 > EOF
85 85 $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null
86 86 test-archive-2c0277f05ed4/.hg_archival.txt
87 87 test-archive-2c0277f05ed4/bar
88 88 test-archive-2c0277f05ed4/baz/bletch
89 89 test-archive-2c0277f05ed4/foo
90 90 $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null
91 91 test-archive-2c0277f05ed4/.hg_archival.txt
92 92 test-archive-2c0277f05ed4/bar
93 93 test-archive-2c0277f05ed4/baz/bletch
94 94 test-archive-2c0277f05ed4/foo
95 95 $ python getarchive.py "$TIP" zip > archive.zip
96 96 $ unzip -t archive.zip
97 97 Archive: archive.zip
98 98 testing: test-archive-2c0277f05ed4/.hg_archival.txt OK
99 99 testing: test-archive-2c0277f05ed4/bar OK
100 100 testing: test-archive-2c0277f05ed4/baz/bletch OK
101 101 testing: test-archive-2c0277f05ed4/foo OK
102 102 No errors detected in compressed data of archive.zip.
103 103
104 104 test that we can download single directories and files
105 105
106 106 $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null
107 107 test-archive-2c0277f05ed4/baz/bletch
108 108 $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null
109 109 test-archive-2c0277f05ed4/foo
110 110
111 111 test that we reject unsafe patterns
112 112
113 113 $ python getarchive.py "$TIP" gz relre:baz
114 114 HTTP Error 403: Archive pattern not allowed: relre:baz
115 115
116 116 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
117 117
118 118 $ hg archive -t tar test.tar
119 119 $ tar tf test.tar
120 120 test/.hg_archival.txt
121 121 test/bar
122 122 test/baz/bletch
123 123 test/foo
124 124
125 125 $ hg archive --debug -t tbz2 -X baz test.tar.bz2
126 126 archiving: 0/2 files (0.00%)
127 127 archiving: bar 1/2 files (50.00%)
128 128 archiving: foo 2/2 files (100.00%)
129 129 $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null
130 130 test/.hg_archival.txt
131 131 test/bar
132 132 test/foo
133 133
134 134 $ hg archive -t tgz -p %b-%h test-%h.tar.gz
135 135 $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null
136 136 test-2c0277f05ed4/.hg_archival.txt
137 137 test-2c0277f05ed4/bar
138 138 test-2c0277f05ed4/baz/bletch
139 139 test-2c0277f05ed4/foo
140 140
141 141 $ hg archive autodetected_test.tar
142 142 $ tar tf autodetected_test.tar
143 143 autodetected_test/.hg_archival.txt
144 144 autodetected_test/bar
145 145 autodetected_test/baz/bletch
146 146 autodetected_test/foo
147 147
148 148 The '-t' should override autodetection
149 149
150 150 $ hg archive -t tar autodetect_override_test.zip
151 151 $ tar tf autodetect_override_test.zip
152 152 autodetect_override_test.zip/.hg_archival.txt
153 153 autodetect_override_test.zip/bar
154 154 autodetect_override_test.zip/baz/bletch
155 155 autodetect_override_test.zip/foo
156 156
157 157 $ for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do
158 158 > hg archive auto_test.$ext
159 159 > if [ -d auto_test.$ext ]; then
160 160 > echo "extension $ext was not autodetected."
161 161 > fi
162 162 > done
163 163
164 164 $ cat > md5comp.py <<EOF
165 165 > try:
166 166 > from hashlib import md5
167 167 > except ImportError:
168 168 > from md5 import md5
169 169 > import sys
170 170 > f1, f2 = sys.argv[1:3]
171 171 > h1 = md5(file(f1, 'rb').read()).hexdigest()
172 172 > h2 = md5(file(f2, 'rb').read()).hexdigest()
173 173 > print h1 == h2 or "md5 differ: " + repr((h1, h2))
174 174 > EOF
175 175
176 176 archive name is stored in the archive, so create similar archives and
177 177 rename them afterwards.
178 178
179 179 $ hg archive -t tgz tip.tar.gz
180 180 $ mv tip.tar.gz tip1.tar.gz
181 181 $ sleep 1
182 182 $ hg archive -t tgz tip.tar.gz
183 183 $ mv tip.tar.gz tip2.tar.gz
184 184 $ python md5comp.py tip1.tar.gz tip2.tar.gz
185 185 True
186 186
187 187 $ hg archive -t zip -p /illegal test.zip
188 188 abort: archive prefix contains illegal components
189 189 [255]
190 190 $ hg archive -t zip -p very/../bad test.zip
191 191
192 192 $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip
193 193 $ unzip -t test.zip
194 194 Archive: test.zip
195 195 testing: test/bar OK
196 196 testing: test/baz/bletch OK
197 197 testing: test/foo OK
198 198 No errors detected in compressed data of test.zip.
199 199
200 200 $ hg archive -t tar - | tar tf - 2>/dev/null
201 201 test-2c0277f05ed4/.hg_archival.txt
202 202 test-2c0277f05ed4/bar
203 203 test-2c0277f05ed4/baz/bletch
204 204 test-2c0277f05ed4/foo
205 205
206 206 $ hg archive -r 0 -t tar rev-%r.tar
207 207 $ if [ -f rev-0.tar ]; then
208 208 $ fi
209 209
210 210 test .hg_archival.txt
211 211
212 212 $ hg archive ../test-tags
213 213 $ cat ../test-tags/.hg_archival.txt
214 214 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
215 215 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
216 216 branch: default
217 217 latesttag: null
218 218 latesttagdistance: 3
219 219 $ hg tag -r 2 mytag
220 220 $ hg tag -r 2 anothertag
221 221 $ hg archive -r 2 ../test-lasttag
222 222 $ cat ../test-lasttag/.hg_archival.txt
223 223 repo: daa7f7c60e0a224faa4ff77ca41b2760562af264
224 224 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e
225 225 branch: default
226 226 tag: anothertag
227 227 tag: mytag
228 228
229 229 $ hg archive -t bogus test.bogus
230 230 abort: unknown archive type 'bogus'
231 231 [255]
232 232
233 233 enable progress extension:
234 234
235 235 $ cp $HGRCPATH $HGRCPATH.no-progress
236 236 $ cat >> $HGRCPATH <<EOF
237 237 > [extensions]
238 238 > progress =
239 239 > [progress]
240 240 > assume-tty = 1
241 241 > format = topic bar number
242 242 > delay = 0
243 243 > refresh = 0
244 244 > width = 60
245 245 > EOF
246 246
247 247 $ hg archive ../with-progress
248 248 \r (no-eol) (esc)
249 249 archiving [ ] 0/4\r (no-eol) (esc)
250 250 archiving [ ] 0/4\r (no-eol) (esc)
251 251 archiving [=========> ] 1/4\r (no-eol) (esc)
252 252 archiving [=========> ] 1/4\r (no-eol) (esc)
253 253 archiving [====================> ] 2/4\r (no-eol) (esc)
254 254 archiving [====================> ] 2/4\r (no-eol) (esc)
255 255 archiving [===============================> ] 3/4\r (no-eol) (esc)
256 256 archiving [===============================> ] 3/4\r (no-eol) (esc)
257 257 archiving [==========================================>] 4/4\r (no-eol) (esc)
258 258 archiving [==========================================>] 4/4\r (no-eol) (esc)
259 259 \r (no-eol) (esc)
260 260
261 261 cleanup after progress extension test:
262 262
263 263 $ cp $HGRCPATH.no-progress $HGRCPATH
264 264
265 265 server errors
266 266
267 267 $ cat errors.log
268 268
269 269 empty repo
270 270
271 271 $ hg init ../empty
272 272 $ cd ../empty
273 273 $ hg archive ../test-empty
274 274 abort: no working directory: please specify a revision
275 275 [255]
276 276
277 277 old file -- date clamped to 1980
278 278
279 279 $ touch -t 197501010000 old
280 280 $ hg add old
281 281 $ hg commit -m old
282 282 $ hg archive ../old.zip
283 283 $ unzip -l ../old.zip
284 284 Archive: ../old.zip
285 285 \s*Length.* (re)
286 286 *-----* (glob)
287 287 *147*80*00:00*old/.hg_archival.txt (glob)
288 288 *0*80*00:00*old/old (glob)
289 289 *-----* (glob)
290 290 \s*147\s+2 files (re)
291 291
292 show an error when a provided pattern matches no files
293
294 $ hg archive -I file_that_does_not_exist.foo ../empty.zip
295 abort: no files match the archive pattern
296 [255]
297
298 $ hg archive -X * ../empty.zip
299 abort: no files match the archive pattern
300 [255]
301
292 302 $ cd ..
293 303
294 304 issue3600: check whether "hg archive" can create archive files which
295 305 are extracted with expected timestamp, even though TZ is not
296 306 configured as GMT.
297 307
298 308 $ mkdir issue3600
299 309 $ cd issue3600
300 310
301 311 $ hg init repo
302 312 $ echo a > repo/a
303 313 $ hg -R repo add repo/a
304 314 $ hg -R repo commit -m '#0' -d '456789012 21600'
305 315 $ cat > show_mtime.py <<EOF
306 316 > import sys, os
307 317 > print int(os.stat(sys.argv[1]).st_mtime)
308 318 > EOF
309 319
310 320 $ hg -R repo archive --prefix tar-extracted archive.tar
311 321 $ (TZ=UTC-3; export TZ; tar xf archive.tar)
312 322 $ python show_mtime.py tar-extracted/a
313 323 456789012
314 324
315 325 $ hg -R repo archive --prefix zip-extracted archive.zip
316 326 $ (TZ=UTC-3; export TZ; unzip -q archive.zip)
317 327 $ python show_mtime.py zip-extracted/a
318 328 456789012
319 329
320 330 $ cd ..
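The "old file -- date clamped to 1980" and issue3600 blocks above exercise archive timestamps: zip entries for files older than 1980 are clamped, because the zip format's DOS date field cannot represent earlier dates, while the issue3600 checks confirm that the changeset mtime survives extraction regardless of the local TZ. A small illustrative sketch of such a clamp, assuming a helper of our own rather than the actual archival code:

    # Sketch: clamp an mtime to the earliest date a zip entry can carry (1980-01-01).
    import calendar, time

    ZIP_EPOCH = calendar.timegm((1980, 1, 1, 0, 0, 0))  # zip/DOS dates start in 1980

    def zipsafe_mtime(mtime):
        return max(mtime, ZIP_EPOCH)

    # a 1975 timestamp comes back as the 1980 floor
    print time.gmtime(zipsafe_mtime(157766400))[:6]   # -> (1980, 1, 1, 0, 0, 0)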