##// END OF EJS Templates
archive: call the storage prefetch hook
Matt Harbison -
r35943:533f04d4 default
parent child Browse files
Show More
@@ -1,358 +1,360 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import gzip
11 11 import os
12 12 import struct
13 13 import tarfile
14 14 import time
15 15 import zipfile
16 16 import zlib
17 17
18 18 from .i18n import _
19 19
20 20 from . import (
21 cmdutil,
21 22 error,
22 23 formatter,
23 24 match as matchmod,
24 25 util,
25 26 vfs as vfsmod,
26 27 )
27 28 stringio = util.stringio
28 29
29 30 # from unzip source code:
30 31 _UNX_IFREG = 0x8000
31 32 _UNX_IFLNK = 0xa000
32 33
def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        # no explicit prefix: derive one from the destination file name
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        # strip a recognized archive suffix (e.g. '.tar.gz') from the name
        for sfx in exts.get(kind, []):
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # Drop the leading '.' path component if present, so Windows can read the
    # zip files (issue4634)
    if prefix.startswith('./'):
        prefix = prefix[2:]
    # refuse prefixes that could make members escape the extraction directory
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise error.Abort(_('archive prefix contains illegal components'))
    return prefix
59 60
# archive kind -> list of destination-name suffixes recognized for that kind
# (used by guesskind() and by tidyprefix() when deriving a prefix)
exts = {
    'tar': ['.tar'],
    'tbz2': ['.tbz2', '.tar.bz2'],
    'tgz': ['.tgz', '.tar.gz'],
    'zip': ['.zip'],
    }
66 67
def guesskind(dest):
    """Guess the archive kind from the destination name's suffix.

    Returns the matching key of ``exts``, or None when no known
    suffix matches.
    """
    for kind, extensions in exts.iteritems():
        for ext in extensions:
            if dest.endswith(ext):
                return kind
    return None
72 73
def _rootctx(repo):
    '''return the context of the first visible revision, falling back to
    the null context when the repo has no visible revisions'''
    # repo[0] may be hidden
    for rev in repo:
        return repo[rev]
    return repo['null']
78 79
# {tags} on ctx includes local tags and 'tip', with no current way to limit
# that to global tags. Therefore, use {latesttag} as a substitute when
# the distance is 0, since that will be the list of global tags on ctx.
# Default template for the .hg_archival.txt metadata produced by
# buildmetadata(); overridable via experimental.archivemetatemplate.
_defaultmetatemplate = br'''
repo: {root}
node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
branch: {branch|utf8}
{ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
       separate("\n",
                join(latesttag % "latesttag: {tag}", "\n"),
                "latesttagdistance: {latesttagdistance}",
                "changessincelatesttag: {changessincelatesttag}"))}
'''[1:] # drop leading '\n'
92 93
def buildmetadata(ctx):
    '''build content of .hg_archival.txt'''
    repo = ctx.repo()

    # the template can be overridden via experimental.archivemetatemplate
    opts = {
        'template': repo.ui.config('experimental', 'archivemetatemplate',
                                   _defaultmetatemplate)
    }

    out = util.stringio()

    fm = formatter.formatter(repo.ui, out, 'archive', opts)
    fm.startitem()
    fm.context(ctx=ctx)
    fm.data(root=_rootctx(repo).hex())

    if ctx.rev() is None:
        # working-directory context: mark uncommitted (or missing) changes
        # with a '+' suffix on the node (see the 'dirty' template keyword)
        dirty = ''
        if ctx.dirty(missing=True):
            dirty = '+'
        fm.data(dirty=dirty)
    fm.end()

    return out.getvalue()
117 118
class tarit(object):
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):
        # GzipFile subclass that writes a caller-supplied timestamp into
        # the gzip header (instead of the current time), so archives of
        # the same revision are byte-for-byte reproducible.

        def __init__(self, *args, **kw):
            timestamp = None
            if 'timestamp' in kw:
                timestamp = kw.pop(r'timestamp')
            if timestamp is None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            # reimplementation of gzip.GzipFile._write_gzip_header that
            # emits self.timestamp in the MTIME field (Python 2 layout)
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            fname = self.name
            if fname and fname.endswith('.gz'):
                fname = fname[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')                 # XFL: max compression
            self.fileobj.write('\377')                 # OS: unknown
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, mtime, kind=''):
        # mtime is used for the gzip header and for all member mtimes
        self.mtime = mtime
        self.fileobj = None

        def taropen(mode, name='', fileobj=None):
            if kind == 'gz':
                # wrap the destination in our reproducible gzip stream and
                # let tarfile write an uncompressed tar into it
                mode = mode[0]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(name, mode, gzfileobj)
            else:
                return tarfile.open(name, mode + kind, fileobj)

        if isinstance(dest, str):
            self.z = taropen('w:', name=dest)
        else:
            # stream destination (e.g. http response): use pipe mode
            self.z = taropen('w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            # symlink members carry their target in linkname, no payload
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = data
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = stringio(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
        if self.fileobj:
            # close the gzip wrapper created in taropen(), if any
            self.fileobj.close()
191 192
class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        # wrapped file object plus a locally maintained write offset
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        # everything we don't implement is delegated to the wrapped object
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset = self.offset + len(s)

    def tell(self):
        return self.offset
209 210
class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, str):
            # a stream destination (e.g. an http response) may not
            # support tell(); wrap it so zipfile can track its position
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        method = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
        self.z = zipfile.ZipFile(dest, 'w', method)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        self.mtime = max(mtime, epoch)
        self.date_time = time.gmtime(self.mtime)[:6]

    def addfile(self, name, mode, islink, data):
        info = zipfile.ZipInfo(name, self.date_time)
        info.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        info.create_system = 3
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        else:
            ftype = _UNX_IFREG
        info.external_attr = (mode | ftype) << 16
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        info.extra += struct.pack('<hhBl',
                                  0x5455,     # block type: "extended-timestamp"
                                  1 + 4,      # size of this block
                                  1,          # "modification time is present"
                                  int(self.mtime)) # last modification (UTC)
        self.z.writestr(info, data)

    def done(self):
        self.z.close()
256 257
class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        # name is the destination directory; mtime (or None) is applied
        # to every written file
        self.basedir = name
        self.opener = vfsmod.vfs(self.basedir)
        self.mtime = mtime

    def addfile(self, name, mode, islink, data):
        if islink:
            # symlink members carry their target in 'data'
            self.opener.symlink(data, name)
            return
        f = self.opener(name, "w", atomictemp=True)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)
        if self.mtime is not None:
            os.utime(destfile, (self.mtime, self.mtime))

    def done(self):
        # nothing to finalize: files were written as we went
        pass
279 280
# archive kind -> archiver factory; each factory takes (dest, mtime) and
# returns an object with addfile()/done() (see archive() below)
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),
    'zip': zipit,
    }
288 289
def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    mtime is the modified time, in seconds, or None to use the changeset time.

    subrepos tells whether to include subrepos.

    Returns the number of files archived; raises error.Abort on an
    unknown kind, a bad prefix, or when nothing matches.
    '''

    if kind == 'files':
        if prefix:
            raise error.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # helper adding one member; 'archiver' is bound later below
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise error.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta"):
        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        # give the storage layer a chance to fetch the file contents in
        # bulk before they are requested one by one in the loop below
        cmdutil._prefetchfiles(repo, ctx, files)
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            # 'x' flag -> executable mode, 'l' flag -> symlink
            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch, decode)

    if total == 0:
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
@@ -1,2151 +1,2155 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22
23 23 from .i18n import _
24 24 from . import (
25 25 cmdutil,
26 26 config,
27 27 encoding,
28 28 error,
29 29 exchange,
30 30 filemerge,
31 31 logcmdutil,
32 32 match as matchmod,
33 33 node,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 scmutil,
38 38 util,
39 39 vfs as vfsmod,
40 40 )
41 41
42 42 hg = None
43 43 propertycache = util.propertycache
44 44
45 45 nullstate = ('', '', 'empty')
46 46
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    candidate = util.url(util.urllocalpath(util.expandpath(path)))
    if candidate.scheme:
        # a real URL: hand it back untouched
        return path
    return util.normpath(os.path.abspath(candidate.path))
56 56
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    # 12 hex digits of the sha1 of the expanded path keeps the name short
    # while staying effectively collision-free for cache purposes
    return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
60 60
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        # which subrepo raised, and the original sys.exc_info() triple
        # (both optional; r'' keys keep them native strings on py2/py3)
        self.subrepo = kw.pop(r'subrepo', None)
        self.cause = kw.pop(r'cause', None)
        error.Abort.__init__(self, *args, **kw)
67 67
def annotatesubrepoerror(func):
    """decorator rewrapping error.Abort raised by a subrepo method so the
    message names the subrepository, without annotating it twice"""
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
83 83
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    repo = ctx.repo()
    def read(f, sections=None, remap=None):
        # parse a config-format file from ctx into p; passed to p.parse as
        # the include callback so %include works recursively
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
                        repo.pathto(f))
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
    if '.hgsub' in ctx:
        read('.hgsub')

    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # rev: subrepo path -> revision recorded in .hgsubstate
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        # apply the [subpaths] rewrite rules to src, first match wins
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = util.escapestr(repl)
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        if src.startswith('['):
            # '[kind]source' syntax selects a non-hg subrepo type
            if ']' not in src:
                raise error.Abort(_('missing ] in subrepository source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            parent = _abssource(repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
175 175
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    lines = []
    for path in sorted(state):
        revision = state[path][1]
        # subrepos still at the null revision are not recorded
        if revision != nullstate[1]:
            lines.append('%s %s\n' % (revision, path))
    repo.wwrite('.hgsubstate', ''.join(lines), '')
181 181
def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate  # local (working) subrepo state
    s2 = mctx.substate  # other (merged-in) subrepo state
    sa = actx.substate  # ancestor subrepo state
    sm = {}             # resulting merged state

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    promptssrc = filemerge.partextras(labels)
    # first pass: subrepos present locally
    for s, l in sorted(s1.iteritems()):
        prompts = None
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        prompts = promptssrc.copy()
        prompts['s'] = s
        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                prompts['lo'] = l[0]
                prompts['ro'] = r[0]
                if repo.ui.promptchoice(
                    _(' subrepository sources for %(s)s differ\n'
                      'use (l)ocal%(l)s source (%(lo)s)'
                      ' or (r)emote%(o)s source (%(ro)s)?'
                      '$$ &Local $$ &Remote') % prompts, 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # divergence: let the user pick merge/local/remote
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                prompts['sl'] = srepo.shortid(l[1])
                prompts['sr'] = srepo.shortid(r[1])
                option = repo.ui.promptchoice(
                    _(' subrepository %(s)s diverged (local revision: %(sl)s, '
                      'remote revision: %(sr)s)\n'
                      '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % prompts, 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # local changed a subrepo that the remote removed
            if repo.ui.promptchoice(
                _(' local%(l)s changed subrepository %(s)s'
                  ' which remote%(o)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # second pass: subrepos only present on the other side
    for s, r in sorted(s2.items()):
        prompts = None
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed a subrepo that was removed locally
            prompts = promptssrc.copy()
            prompts['s'] = s
            if repo.ui.promptchoice(
                _(' remote%(o)s changed subrepository %(s)s'
                  ' which local%(l)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0) == 0:
                debug(s, "prompt recreate", r)
                mctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
296 296
def precommit(ui, wctx, status, match, force=False):
    """Calculate .hgsubstate changes that should be applied before committing

    Returns (subs, commitsubs, newstate) where
    - subs: changed subrepos (including dirty ones)
    - commitsubs: dirty subrepos which the caller needs to commit recursively
    - newstate: new state dict which the caller must write to .hgsubstate

    This also updates the given status argument.
    """
    subs = []
    commitsubs = set()
    newstate = wctx.substate.copy()

    # only manage subrepos and .hgsubstate if .hgsub is present
    if '.hgsub' in wctx:
        # we'll decide whether to track this ourselves, thanks
        for c in status.modified, status.added, status.removed:
            if '.hgsubstate' in c:
                c.remove('.hgsubstate')

        # compare current state to last committed state
        # build new substate based on last committed state
        oldstate = wctx.p1().substate
        for s in sorted(newstate.keys()):
            if not match(s):
                # ignore working copy, use old state if present
                if s in oldstate:
                    newstate[s] = oldstate[s]
                    continue
                if not force:
                    raise error.Abort(
                        _("commit with new subrepo %s excluded") % s)
            dirtyreason = wctx.sub(s).dirtyreason(True)
            if dirtyreason:
                # dirty subrepo: only commit it when --subrepos was given
                if not ui.configbool('ui', 'commitsubrepos'):
                    raise error.Abort(dirtyreason,
                        hint=_("use --subrepos for recursive commit"))
                subs.append(s)
                commitsubs.add(s)
            else:
                bs = wctx.sub(s).basestate()
                newstate[s] = (newstate[s][0], bs, newstate[s][2])
                if oldstate.get(s, (None, None, None))[1] != bs:
                    subs.append(s)

        # check for removed subrepos
        for p in wctx.parents():
            r = [s for s in p.substate if s not in newstate]
            subs += [s for s in r if match(s)]
        if subs:
            if (not match('.hgsub') and
                '.hgsub' in (wctx.modified() + wctx.added())):
                raise error.Abort(_("can't commit subrepos without .hgsub"))
            status.modified.insert(0, '.hgsubstate')

    elif '.hgsubstate' in status.removed:
        # clean up .hgsubstate when .hgsub is removed
        if ('.hgsubstate' in wctx and
            '.hgsubstate' not in (status.modified + status.added +
                                  status.removed)):
            status.removed.insert(0, '.hgsubstate')

    return subs, commitsubs, newstate
361 361
def _updateprompt(ui, sub, dirty, local, remote):
    """Prompt the user to pick the local or remote subrepo source.

    Returns the promptchoice() index (0 = local, 1 = remote)."""
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    return ui.promptchoice(template % (subrelpath(sub), local, remote), 0)
375 375
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # walk up the chain of parent repos to reach the outermost one
    parent = repo
    while util.safehasattr(parent, '_subparent'):
        parent = parent._subparent
    return repo.root[len(pathutil.normasprefix(parent.root)):]
382 382
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # NOTE(review): relies on _relpath being provided by the concrete
    # subrepo class (not visible in this chunk) — confirm before changing
    return sub._relpath
386 386
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # this is a subrepo: resolve its source relative to the parent's
        source = util.url(repo._subsource)
        if source.isabs():
            return bytes(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return bytes(parent)
    else: # recursion reached top repo
        path = None
        if util.safehasattr(repo, '_subtoppath'):
            path = repo._subtoppath
        elif push and repo.ui.config('paths', 'default-push'):
            path = repo.ui.config('paths', 'default-push')
        elif repo.ui.config('paths', 'default'):
            path = repo.ui.config('paths', 'default')
        elif repo.shared():
            # chop off the .hg component to get the default path form. This has
            # already run through vfsmod.vfs(..., realpath=True), so it doesn't
            # have problems with 'C:'
            return os.path.dirname(repo.sharedpath)
        if path:
            # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is
            # as expected: an absolute path to the root of the C: drive. The
            # latter is a relative path, and works like so:
            #
            #   C:\>cd C:\some\path
            #   C:\>D:
            #   D:\>python -c "import os; print os.path.abspath('C:')"
            #   C:\some\path
            #
            #   D:\>python -c "import os; print os.path.abspath('C:relative')"
            #   C:\some\path\relative
            if util.hasdriveletter(path):
                if len(path) == 2 or path[2:3] not in br'\/':
                    path = os.path.abspath(path)
            return path

    if abort:
        raise error.Abort(_("default path for subrepository not found"))
def _sanitize(ui, vfs, ignore):
    """remove potentially hostile 'hgrc' files found inside '.hg'
    directories under vfs; directories named *ignore* are skipped"""
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                # deleting from 'dirs' prunes the walk below this directory
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
447 447
def _auditsubrepopath(repo, path):
    """abort if the subrepo path is unsafe or is itself a symlink"""
    # auditor doesn't check if the path itself is a symlink
    pathutil.pathauditor(repo.root)(path)
    if repo.wvfs.islink(path):
        raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
453 453
# default value for the 'subrepos.<kind>:allowed' config knob, per kind
# (consulted by _checktype(); unknown kinds default to False there)
SUBREPO_ALLOWED_DEFAULTS = {
    'hg': True,
    'git': False,
    'svn': False,
}
459 459
def _checktype(ui, kind):
    """abort unless subrepos of the given kind are enabled by config"""
    # subrepos.allowed is a master kill switch. If disabled, subrepos are
    # disabled period.
    if not ui.configbool('subrepos', 'allowed', True):
        raise error.Abort(_('subrepos not enabled'),
                          hint=_("see 'hg help config.subrepos' for details"))

    # per-kind opt-in/opt-out, with kind-specific defaults
    default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
    if not ui.configbool('subrepos', '%s:allowed' % kind, default):
        raise error.Abort(_('%s subrepos not allowed') % kind,
                          hint=_("see 'hg help config.subrepos' for details"))

    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
474 474
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    state = ctx.substate[path]
    _checktype(repo.ui, state[2])
    if allowwdir:
        # substitute the working-directory revision for the recorded one
        state = (state[0], ctx.subrev(path), state[2])
    return types[state[2]](ctx, path, state[:2], allowcreate)
492 492
def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    state = ctx.substate[path]
    _checktype(repo.ui, state[2])
    subrev = ''
    if state[2] == 'hg':
        # forty zeroes: the hg null revision in hex
        subrev = "0" * 40
    return types[state[2]](pctx, path, (state[0], subrev), True)
511 511
def newcommitphase(ui, ctx):
    """return the phase for a new commit, taking subrepo phases into account

    Behavior is controlled by phases.checksubrepos ('ignore', 'follow'
    or 'abort'): the commit phase may be raised to the highest phase
    found among ctx's subrepo revisions, or the commit aborted.
    """
    commitphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        return commitphase
    check = ui.config('phases', 'checksubrepos')
    if check not in ('ignore', 'follow', 'abort'):
        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
                          % (check))
    if check == 'ignore':
        return commitphase
    # find the highest (least public) phase among the subrepo revisions
    maxphase = phases.public
    maxsub = None
    for s in sorted(substate):
        sub = ctx.sub(s)
        subphase = sub.phase(substate[s][1])
        if maxphase < subphase:
            maxphase = subphase
            maxsub = s
    if commitphase < maxphase:
        if check == 'abort':
            raise error.Abort(_("can't commit in %s phase"
                                " conflicting %s from subrepository %s") %
                              (phases.phasenames[commitphase],
                               phases.phasenames[maxphase], maxsub))
        # 'follow': warn and adopt the subrepo's phase
        ui.warn(_("warning: changes are committed in"
                  " %s phase from subrepository %s\n") %
                (phases.phasenames[maxphase], maxsub))
        return maxphase
    return commitphase
542 542
543 543 # subrepo classes need to implement the following abstract class:
544 544
class abstractsubrepo(object):
    """Interface and default behavior shared by all subrepo flavors.

    Concrete backends (hg, svn, git) override the operations they support;
    defaults here either do nothing, report the operation as unsupported,
    or raise NotImplementedError for operations every backend must provide.
    """

    def __init__(self, ctx, path):
        """Set up the state common to every subrepo implementation.

        ``ctx`` is the context of the parent repository that refers to
        this subrepository.

        ``path`` is this subrepo's path as seen from the innermost
        repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Register hgwebdir entries for this subrepo and its subrepos.

        ``serverpath`` is the URL path component for this repo.

        ``webconf`` is the hgwebdir entry dictionary to fill in.
        """
        pass

    def storeclean(self, path):
        """Tell whether the store is unchanged since the last exchange
        (clone/push/pull) with the repository at ``path``.  Backends that
        cannot tell report False.
        """
        return False

    def dirty(self, ignoreupdate=False, missing=False):
        """Tell whether the subrepo's working state differs from the
        recorded state.  With ``ignoreupdate``, only uncommitted changes
        count; with ``missing``, deleted files count as well.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False, missing=False):
        """Return a message describing why the subrepo is ``dirty()``,
        suitable for an exception, or None when it is clean.
        """
        if not self.dirty(ignoreupdate=ignoreupdate, missing=missing):
            return None
        return _('uncommitted changes in subrepository "%s"'
                 ) % subrelpath(self)

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """Raise Abort when the subrepository is ``dirty()``."""
        reason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
        if reason:
            raise error.Abort(reason, hint=hint)

    def basestate(self):
        """Return the working directory's base state, ignoring both
        .hgsubstate and working directory modifications."""
        raise NotImplementedError

    def checknested(self, path):
        """Tell whether ``path`` is a subrepository inside this one."""
        return False

    def commit(self, text, user, date):
        """Commit the current changes of the subrepo with log message
        ``text``, using ``user`` and ``date`` when the backend allows it.
        Return the subrepo's new state.
        """
        raise NotImplementedError

    def phase(self, state):
        """Return the phase of ``state`` in this subrepository; backends
        without phases report public."""
        return phases.public

    def remove(self):
        """Remove the subrepo.

        (Callers should verify the dirstate is clean first.)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """Run whatever commands are needed to bring the subrepo into
        ``state``."""
        raise NotImplementedError

    def merge(self, state):
        """Merge the currently-saved state with ``state``."""
        raise NotImplementedError

    def push(self, opts):
        """Perform the backend's analogue of 'hg push'.

        May be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, explicitonly, **opts):
        """Schedule matched files for addition; unsupported by default."""
        return []

    def addremove(self, matcher, prefix, opts, dry_run, similarity):
        """addremove is unsupported by default; warn and report failure."""
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1

    def cat(self, match, fm, fntemplate, prefix, **opts):
        """Output matched file contents; unsupported by default."""
        return 1

    def status(self, rev2, **opts):
        """Return an empty status; backends override with real data."""
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        """Show diffs against ``node2``; a no-op by default."""
        pass

    def outgoing(self, ui, dest, opts):
        """Show outgoing changes; unsupported by default."""
        return 1

    def incoming(self, ui, source, opts):
        """Show incoming changes; unsupported by default."""
        return 1

    def files(self):
        """Return an iterator over the subrepo's file names."""
        raise NotImplementedError

    def filedata(self, name, decode):
        """Return the data of file ``name``, run through the repo's
        decode filters when ``decode`` is set."""
        raise NotImplementedError

    def fileflags(self, name):
        """Return the flags ('l'/'x') of file ``name``; none by default."""
        return ''

    def getfileset(self, expr):
        """Resolve fileset expression ``expr`` for this repo."""
        return set()

    def printfiles(self, ui, m, fm, fmt, subrepos):
        """Handle the files command for this subrepo."""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        """Write this subrepo's files (optionally restricted by ``match``)
        into ``archiver`` under ``prefix``; return the file count."""
        if match is None:
            files = self.files()
        else:
            files = [f for f in self.files() if match(f)]
        total = len(files)
        relpath = subrelpath(self)
        topic = _('archiving (%s)') % relpath
        self.ui.progress(topic, 0, unit=_('files'), total=total)
        for i, name in enumerate(files):
            flags = self.fileflags(name)
            mode = 0o755 if 'x' in flags else 0o644
            symlink = 'l' in flags
            archiver.addfile(prefix + self._path + '/' + name,
                             mode, symlink, self.filedata(name, decode))
            self.ui.progress(topic, i + 1, unit=_('files'), total=total)
        self.ui.progress(topic, None)
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''

    def forget(self, match, prefix):
        """Forget matched files; nothing to do by default."""
        return ([], [])

    def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
        """Remove the matched files from the subrepository and the
        filesystem; unsupported by default, so record a warning and
        report failure (1)."""
        warnings.append(_("warning: removefiles not implemented (%s)")
                        % self._path)
        return 1

    def revert(self, substate, *pats, **opts):
        """Revert the subrepo to ``substate``; unsupported by default."""
        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
                     % (substate[0], substate[2]))
        return []

    def shortid(self, revid):
        """Return a short form of ``revid``; unabbreviated by default."""
        return revid

    def unshare(self):
        '''
        convert this repository from shared to normal storage.
        '''

    def verify(self):
        '''verify the integrity of the repository. Return 0 on success or
        warning, 1 on any error.
        '''
        return 0

    @propertycache
    def wvfs(self):
        """Return a vfs rooted at this subrepo's working directory."""
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """Return this subrepo's path as seen from the outermost repo."""
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
759 759
class hgsubrepo(abstractsubrepo):
    """Subrepository backed by a Mercurial repository.

    Wraps a full local hg repo rooted inside the parent's working
    directory; most operations delegate to the usual cmdutil/hg/exchange
    machinery against that inner repo.
    """

    def __init__(self, ctx, path, state, allowcreate):
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(path)
        # only create the inner repo when allowed and not already present
        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
        self._repo = hg.repository(r.baseui, root, create=create)

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, 'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
        self._initrepo(r, state[0], create)

    @annotatesubrepoerror
    def addwebdirpath(self, serverpath, webconf):
        cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)

    def storeclean(self, path):
        """True if the store is unchanged since the last exchange with
        the repo at ``path`` (compared via the cached store hash)."""
        with self._repo.lock():
            return self._storeclean(path)

    def _storeclean(self, path):
        # compare the cached store hash lines against freshly computed ones
        clean = True
        itercache = self._calcstorehash(path)
        for filehash in self._readstorehashcache(path):
            if filehash != next(itercache, None):
                clean = False
                break
        if clean:
            # if not empty:
            # the cached and current pull states have a different size
            clean = next(itercache, None) is None
        return clean

    def _calcstorehash(self, remotepath):
        '''calculate a unique "store hash"

        This method is used to to detect when there are changes that may
        require a push to a given remote path.'''
        # sort the files that will be hashed in increasing (likely) file size
        filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
        yield '# %s\n' % _expandedabspath(remotepath)
        vfs = self._repo.vfs
        for relname in filelist:
            filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
            yield '%s = %s\n' % (relname, filehash)

    @propertycache
    def _cachestorehashvfs(self):
        # vfs rooted at the per-repo store hash cache directory
        return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))

    def _readstorehashcache(self, remotepath):
        '''read the store hash cache for a given remote repository'''
        cachefile = _getstorehashcachename(remotepath)
        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')

    def _cachestorehash(self, remotepath):
        '''cache the current store hash

        Each remote repo requires its own store hash cache, because a subrepo
        store may be "clean" versus a given remote repo, but not versus another
        '''
        cachefile = _getstorehashcachename(remotepath)
        with self._repo.lock():
            storehash = list(self._calcstorehash(remotepath))
            vfs = self._cachestorehashvfs
            vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)

    def _getctx(self):
        '''fetch the context for this subrepo revision, possibly a workingctx
        '''
        if self._ctx.rev() is None:
            return self._repo[None] # workingctx if parent is workingctx
        else:
            rev = self._state[1]
            return self._repo[rev]

    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        # link the inner repo back to its parent and record its source
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            # seed the new subrepo's hgrc with default/default-push paths
            lines = ['[paths]\n']

            def addpathconfig(key, value):
                if value:
                    lines.append('%s = %s\n' % (key, value))
                    self.ui.setconfig('paths', key, value, 'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)

            self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))

    @annotatesubrepoerror
    def add(self, ui, match, prefix, explicitonly, **opts):
        return cmdutil.add(ui, self._repo, match,
                           self.wvfs.reljoin(prefix, self._path),
                           explicitonly, **opts)

    @annotatesubrepoerror
    def addremove(self, m, prefix, opts, dry_run, similarity):
        # In the same way as sub directories are processed, once in a subrepo,
        # always entry any of its subrepos. Don't corrupt the options that will
        # be used to process sibling subrepos however.
        opts = copy.copy(opts)
        opts['subrepos'] = True
        return scmutil.addremove(self._repo, m,
                                 self.wvfs.reljoin(prefix, self._path), opts,
                                 dry_run, similarity)

    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        ctx = self._repo[rev]
        return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
                           prefix, **opts)

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError as inst:
            # a missing revision is reported, not fatal, for status
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))
            return scmutil.status([], [], [], [], [], [], [])

    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            logcmdutil.diffordiffstat(ui, self._repo, diffopts,
                                      node1, node2, match,
                                      prefix=posixpath.join(prefix, self._path),
                                      listsubrepos=True, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))

    @annotatesubrepoerror
    def archive(self, archiver, prefix, match=None, decode=True):
        """Archive this subrepo's files and recurse into its subrepos.

        Prefetches file data first so storages with a remote backend can
        batch their fetches.  Returns the total number of files written.
        """
        self._get(self._state + ('hg',))
        files = self.files()
        if match:
            files = [f for f in files if match(f)]
        rev = self._state[1]
        ctx = self._repo[rev]
        cmdutil._prefetchfiles(self._repo, ctx, files)
        # fix: forward ``decode`` so 'archive --no-decode' is honored for
        # this subrepo's own files, not just for nested subrepos below
        total = abstractsubrepo.archive(self, archiver, prefix, match, decode)
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath, True)
            submatch = matchmod.subdirmatcher(subpath, match)
            total += s.archive(archiver, prefix + self._path + '/', submatch,
                               decode)
        return total

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        r = self._state[1]
        if r == '' and not ignoreupdate: # no state recorded
            return True
        w = self._repo[None]
        if r != w.p1().hex() and not ignoreupdate:
            # different version checked out
            return True
        return w.dirty(missing=missing) # working directory changed

    def basestate(self):
        # working directory parent, regardless of .hgsubstate
        return self._repo['.'].hex()

    def checknested(self, path):
        return self._repo._checknested(self._repo.wjoin(path))

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # don't bother committing in the subrepo if it's only been
        # updated
        if not self.dirty(True):
            return self._repo['.'].hex()
        self.ui.debug("committing subrepo %s\n" % subrelpath(self))
        n = self._repo.commit(text, user, date)
        if not n:
            return self._repo['.'].hex() # different version checked out
        return node.hex(n)

    @annotatesubrepoerror
    def phase(self, state):
        return self._repo[state].phase()

    @annotatesubrepoerror
    def remove(self):
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
        hg.clean(self._repo, node.nullid, False)

    def _get(self, state):
        """Make ``state``'s revision available locally, sharing, cloning
        or pulling from the source as needed.

        Returns True when the revision was already present (possibly in a
        hidden/filtered form), False after a fresh share/clone/pull.
        """
        source, revision, kind = state
        parentrepo = self._repo._subparent

        if revision in self._repo.unfiltered():
            # Allow shared subrepos tracked at null to setup the sharedpath
            if len(self._repo) != 0 or not parentrepo.shared():
                return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)
        other = hg.peer(self._repo, {}, srcurl)
        if len(self._repo) == 0:
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()
            if parentrepo.shared():
                self.ui.status(_('sharing subrepo %s from %s\n')
                               % (subrelpath(self), srcurl))
                shared = hg.share(self._repo._subparent.baseui,
                                  other, self._repo.root,
                                  update=False, bookmarks=False)
                self._repo = shared.local()
            else:
                self.ui.status(_('cloning subrepo %s from %s\n')
                               % (subrelpath(self), srcurl))
                other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                         other, self._repo.root,
                                         update=False)
                self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            self._cachestorehash(srcurl)
        else:
            self.ui.status(_('pulling subrepo %s from %s\n')
                           % (subrelpath(self), srcurl))
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, other)
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Update the working directory to ``state``'s revision."""
        inrepo = self._get(state)
        source, revision, kind = state
        repo = self._repo
        repo.ui.debug("getting subrepo %s\n" % self._path)
        if inrepo:
            # the revision pre-existed; warn (and use the unfiltered repo)
            # if it is hidden
            urepo = repo.unfiltered()
            ctx = urepo[revision]
            if ctx.hidden():
                urepo.ui.warn(
                    _('revision %s in subrepository "%s" is hidden\n') \
                    % (revision[0:12], self._path))
                repo = urepo
        hg.updaterepo(repo, revision, overwrite)

    @annotatesubrepoerror
    def merge(self, state):
        """Merge (or fast-forward) the subrepo to ``state``'s revision,
        prompting when local changes would be affected."""
        self._get(state)
        cur = self._repo['.']
        dst = self._repo[state[1]]
        anc = dst.ancestor(cur)

        def mergefunc():
            if anc == cur and dst.branch() == cur.branch():
                # fast-forwardable: plain update
                self.ui.debug('updating subrepository "%s"\n'
                              % subrelpath(self))
                hg.update(self._repo, state[1])
            elif anc == dst:
                # already ahead of the target revision
                self.ui.debug('skipping subrepository "%s"\n'
                              % subrelpath(self))
            else:
                self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
                hg.merge(self._repo, state[1], remind=False)

        wctx = self._repo[None]
        if self.dirty():
            if anc != dst:
                # real divergence with local changes: ask the user first
                if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
                    mergefunc()
            else:
                mergefunc()
        else:
            mergefunc()

    @annotatesubrepoerror
    def push(self, opts):
        force = opts.get('force')
        newbranch = opts.get('new_branch')
        ssh = opts.get('ssh')

        # push subrepos depth-first for coherent ordering
        c = self._repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False

        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                self.ui.status(
                    _('no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), dsturl))
                return None
        self.ui.status(_('pushing subrepo %s to %s\n') %
                       (subrelpath(self), dsturl))
        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
        res = exchange.push(self._repo, other, force, newbranch=newbranch)

        # the repo is now clean
        self._cachestorehash(dsturl)
        return res.cgresult

    @annotatesubrepoerror
    def outgoing(self, ui, dest, opts):
        # rev/branch options refer to the parent repo, not the subrepo
        if 'rev' in opts or 'branch' in opts:
            opts = copy.copy(opts)
            opts.pop('rev', None)
            opts.pop('branch', None)
        return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)

    @annotatesubrepoerror
    def incoming(self, ui, source, opts):
        # rev/branch options refer to the parent repo, not the subrepo
        if 'rev' in opts or 'branch' in opts:
            opts = copy.copy(opts)
            opts.pop('rev', None)
            opts.pop('branch', None)
        return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)

    @annotatesubrepoerror
    def files(self):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.manifest().keys()

    def filedata(self, name, decode):
        rev = self._state[1]
        data = self._repo[rev][name].data()
        if decode:
            data = self._repo.wwritedata(name, data)
        return data

    def fileflags(self, name):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.flags(name)

    @annotatesubrepoerror
    def printfiles(self, ui, m, fm, fmt, subrepos):
        # If the parent context is a workingctx, use the workingctx here for
        # consistency.
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]
        return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)

    @annotatesubrepoerror
    def getfileset(self, expr):
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]

        files = ctx.getfileset(expr)

        # include matches from nested subrepos, prefixed with their path
        for subpath in ctx.substate:
            sub = ctx.sub(subpath)

            try:
                files.extend(subpath + '/' + f for f in sub.getfileset(expr))
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % self.wvfs.reljoin(reporelpath(self), subpath))
        return files

    def walk(self, match):
        ctx = self._repo[None]
        return ctx.walk(match)

    @annotatesubrepoerror
    def forget(self, match, prefix):
        return cmdutil.forget(self.ui, self._repo, match,
                              self.wvfs.reljoin(prefix, self._path), True)

    @annotatesubrepoerror
    def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
        return cmdutil.remove(self.ui, self._repo, matcher,
                              self.wvfs.reljoin(prefix, self._path),
                              after, force, subrepos)

    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        # reverting a subrepo is a 2 step process:
        # 1. if the no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts[r'date'] = None
            opts[r'rev'] = substate[1]

        self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)

    def filerevert(self, *pats, **opts):
        ctx = self._repo[opts[r'rev']]
        parents = self._repo.dirstate.parents()
        if opts.get(r'all'):
            pats = ['set:modified()']
        else:
            pats = []
        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)

    def shortid(self, revid):
        return revid[:12]

    @annotatesubrepoerror
    def unshare(self):
        # subrepo inherently violates our import layering rules
        # because it wants to make repo objects from deep inside the stack
        # so we manually delay the circular imports to not break
        # scripts that don't use our demand-loading
        global hg
        from . import hg as h
        hg = h

        # Nothing prevents a user from sharing in a repo, and then making that a
        # subrepo. Alternately, the previous unshare attempt may have failed
        # part way through. So recurse whether or not this layer is shared.
        if self._repo.shared():
            self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)

        hg.unshare(self.ui, self._repo)

    def verify(self):
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth an
                # explicit warning.
                ui = self._repo.ui
                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
                        (self._relpath, node.short(self._ctx.node())))
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it, so
            # don't treat this as an error.
            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
                               (self._relpath, node.short(self._ctx.node())))
            return 0

    @propertycache
    def wvfs(self):
        """return own wvfs for efficiency and consistency
        """
        return self._repo.wvfs

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        # Keep consistent dir separators by avoiding vfs.join(self._path)
        return reporelpath(self._repo)
1248 1252
1249 1253 class svnsubrepo(abstractsubrepo):
1250 1254 def __init__(self, ctx, path, state, allowcreate):
1251 1255 super(svnsubrepo, self).__init__(ctx, path)
1252 1256 self._state = state
1253 1257 self._exe = util.findexe('svn')
1254 1258 if not self._exe:
1255 1259 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1256 1260 % self._path)
1257 1261
    def _svncommand(self, commands, filename='', failok=False):
        """Run the svn client with ``commands`` and return (stdout, stderr).

        ``filename`` (relative to this subrepo) is appended as the final
        path argument; pass None to omit the path entirely.  With
        ``failok`` a non-zero exit status does not abort and stderr is
        not echoed as a warning.
        """
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw[r'stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = self.wvfs.reljoin(self._ctx.repo().origroot,
                                     self._path, filename)
            cmd.append(path)
        env = dict(encoding.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                # surface svn's own message when available
                raise error.Abort(stderr or 'exited with code %d'
                                  % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr
1295 1299
1296 1300 @propertycache
1297 1301 def _svnversion(self):
1298 1302 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1299 1303 m = re.search(br'^(\d+)\.(\d+)', output)
1300 1304 if not m:
1301 1305 raise error.Abort(_('cannot retrieve svn tool version'))
1302 1306 return (int(m.group(1)), int(m.group(2)))
1303 1307
1304 1308 def _svnmissing(self):
1305 1309 return not self.wvfs.exists('.svn')
1306 1310
1307 1311 def _wcrevs(self):
1308 1312 # Get the working directory revision as well as the last
1309 1313 # commit revision so we can compare the subrepo state with
1310 1314 # both. We used to store the working directory one.
1311 1315 output, err = self._svncommand(['info', '--xml'])
1312 1316 doc = xml.dom.minidom.parseString(output)
1313 1317 entries = doc.getElementsByTagName('entry')
1314 1318 lastrev, rev = '0', '0'
1315 1319 if entries:
1316 1320 rev = str(entries[0].getAttribute('revision')) or '0'
1317 1321 commits = entries[0].getElementsByTagName('commit')
1318 1322 if commits:
1319 1323 lastrev = str(commits[0].getAttribute('revision')) or '0'
1320 1324 return (lastrev, rev)
1321 1325
1322 1326 def _wcrev(self):
1323 1327 return self._wcrevs()[0]
1324 1328
    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        # classify every status entry by its wc-status item/props attributes
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            elif item == 'missing':
                missing.append(path)
            # anything not clean/unversioned/external (in content or
            # properties) counts as a change
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none', 'normal')):
                changes.append(path)
        # a change inside an external's subtree makes the whole result
        # count as an external change
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + pycompat.ossep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)
1353 1357
1354 1358 @annotatesubrepoerror
1355 1359 def dirty(self, ignoreupdate=False, missing=False):
1356 1360 if self._svnmissing():
1357 1361 return self._state[1] != ''
1358 1362 wcchanged = self._wcchanged()
1359 1363 changed = wcchanged[0] or (missing and wcchanged[2])
1360 1364 if not changed:
1361 1365 if self._state[1] in self._wcrevs() or ignoreupdate:
1362 1366 return False
1363 1367 return True
1364 1368
1365 1369 def basestate(self):
1366 1370 lastrev, rev = self._wcrevs()
1367 1371 if lastrev != rev:
1368 1372 # Last committed rev is not the same than rev. We would
1369 1373 # like to take lastrev but we do not know if the subrepo
1370 1374 # URL exists at lastrev. Test it and fallback to rev it
1371 1375 # is not there.
1372 1376 try:
1373 1377 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1374 1378 return lastrev
1375 1379 except error.Abort:
1376 1380 pass
1377 1381 return rev
1378 1382
    @annotatesubrepoerror
    def commit(self, text, user, date):
        """Commit working copy changes to the svn server and return the
        new revision number (a string)."""
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise error.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise error.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self.ui.status(commitinfo)
        # parse the committed revision number out of svn's output
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn one. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise error.Abort(_('failed to commit svn changes'))
            raise error.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        # bring the working copy up to the revision we just created
        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev
1406 1410
1407 1411 @annotatesubrepoerror
1408 1412 def remove(self):
1409 1413 if self.dirty():
1410 1414 self.ui.warn(_('not removing repo %s because '
1411 1415 'it has changes.\n') % self._path)
1412 1416 return
1413 1417 self.ui.note(_('removing subrepo %s\n') % self._path)
1414 1418
1415 1419 self.wvfs.rmtree(forcibly=True)
1416 1420 try:
1417 1421 pwvfs = self._ctx.repo().wvfs
1418 1422 pwvfs.removedirs(pwvfs.dirname(self._path))
1419 1423 except OSError:
1420 1424 pass
1421 1425
1422 1426 @annotatesubrepoerror
1423 1427 def get(self, state, overwrite=False):
1424 1428 if overwrite:
1425 1429 self._svncommand(['revert', '--recursive'])
1426 1430 args = ['checkout']
1427 1431 if self._svnversion >= (1, 5):
1428 1432 args.append('--force')
1429 1433 # The revision must be specified at the end of the URL to properly
1430 1434 # update to a directory which has since been deleted and recreated.
1431 1435 args.append('%s@%s' % (state[0], state[1]))
1432 1436
1433 1437 # SEC: check that the ssh url is safe
1434 1438 util.checksafessh(state[0])
1435 1439
1436 1440 status, err = self._svncommand(args, failok=True)
1437 1441 _sanitize(self.ui, self.wvfs, '.svn')
1438 1442 if not re.search('Checked out revision [0-9]+.', status):
1439 1443 if ('is already a working copy for a different URL' in err
1440 1444 and (self._wcchanged()[:2] == (False, False))):
1441 1445 # obstructed but clean working copy, so just blow it away.
1442 1446 self.remove()
1443 1447 self.get(state, overwrite=False)
1444 1448 return
1445 1449 raise error.Abort((status or err).splitlines()[-1])
1446 1450 self.ui.status(status)
1447 1451
1448 1452 @annotatesubrepoerror
1449 1453 def merge(self, state):
1450 1454 old = self._state[1]
1451 1455 new = state[1]
1452 1456 wcrev = self._wcrev()
1453 1457 if new != wcrev:
1454 1458 dirty = old == wcrev or self._wcchanged()[0]
1455 1459 if _updateprompt(self.ui, self, dirty, wcrev, new):
1456 1460 self.get(state, False)
1457 1461
1458 1462 def push(self, opts):
1459 1463 # push is a no-op for SVN
1460 1464 return True
1461 1465
1462 1466 @annotatesubrepoerror
1463 1467 def files(self):
1464 1468 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1465 1469 doc = xml.dom.minidom.parseString(output)
1466 1470 paths = []
1467 1471 for e in doc.getElementsByTagName('entry'):
1468 1472 kind = str(e.getAttribute('kind'))
1469 1473 if kind != 'file':
1470 1474 continue
1471 1475 name = ''.join(c.data for c
1472 1476 in e.getElementsByTagName('name')[0].childNodes
1473 1477 if c.nodeType == c.TEXT_NODE)
1474 1478 paths.append(name.encode('utf-8'))
1475 1479 return paths
1476 1480
1477 1481 def filedata(self, name, decode):
1478 1482 return self._svncommand(['cat'], name)[0]
1479 1483
1480 1484
1481 1485 class gitsubrepo(abstractsubrepo):
1482 1486 def __init__(self, ctx, path, state, allowcreate):
1483 1487 super(gitsubrepo, self).__init__(ctx, path)
1484 1488 self._state = state
1485 1489 self._abspath = ctx.repo().wjoin(path)
1486 1490 self._subparent = ctx.repo()
1487 1491 self._ensuregit()
1488 1492
1489 1493 def _ensuregit(self):
1490 1494 try:
1491 1495 self._gitexecutable = 'git'
1492 1496 out, err = self._gitnodir(['--version'])
1493 1497 except OSError as e:
1494 1498 genericerror = _("error executing git for subrepo '%s': %s")
1495 1499 notfoundhint = _("check git is installed and in your PATH")
1496 1500 if e.errno != errno.ENOENT:
1497 1501 raise error.Abort(genericerror % (
1498 1502 self._path, encoding.strtolocal(e.strerror)))
1499 1503 elif pycompat.iswindows:
1500 1504 try:
1501 1505 self._gitexecutable = 'git.cmd'
1502 1506 out, err = self._gitnodir(['--version'])
1503 1507 except OSError as e2:
1504 1508 if e2.errno == errno.ENOENT:
1505 1509 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1506 1510 " for subrepo '%s'") % self._path,
1507 1511 hint=notfoundhint)
1508 1512 else:
1509 1513 raise error.Abort(genericerror % (self._path,
1510 1514 encoding.strtolocal(e2.strerror)))
1511 1515 else:
1512 1516 raise error.Abort(_("couldn't find git for subrepo '%s'")
1513 1517 % self._path, hint=notfoundhint)
1514 1518 versionstatus = self._checkversion(out)
1515 1519 if versionstatus == 'unknown':
1516 1520 self.ui.warn(_('cannot retrieve git version\n'))
1517 1521 elif versionstatus == 'abort':
1518 1522 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1519 1523 elif versionstatus == 'warning':
1520 1524 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1521 1525
1522 1526 @staticmethod
1523 1527 def _gitversion(out):
1524 1528 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1525 1529 if m:
1526 1530 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1527 1531
1528 1532 m = re.search(br'^git version (\d+)\.(\d+)', out)
1529 1533 if m:
1530 1534 return (int(m.group(1)), int(m.group(2)), 0)
1531 1535
1532 1536 return -1
1533 1537
1534 1538 @staticmethod
1535 1539 def _checkversion(out):
1536 1540 '''ensure git version is new enough
1537 1541
1538 1542 >>> _checkversion = gitsubrepo._checkversion
1539 1543 >>> _checkversion(b'git version 1.6.0')
1540 1544 'ok'
1541 1545 >>> _checkversion(b'git version 1.8.5')
1542 1546 'ok'
1543 1547 >>> _checkversion(b'git version 1.4.0')
1544 1548 'abort'
1545 1549 >>> _checkversion(b'git version 1.5.0')
1546 1550 'warning'
1547 1551 >>> _checkversion(b'git version 1.9-rc0')
1548 1552 'ok'
1549 1553 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1550 1554 'ok'
1551 1555 >>> _checkversion(b'git version 1.9.0.GIT')
1552 1556 'ok'
1553 1557 >>> _checkversion(b'git version 12345')
1554 1558 'unknown'
1555 1559 >>> _checkversion(b'no')
1556 1560 'unknown'
1557 1561 '''
1558 1562 version = gitsubrepo._gitversion(out)
1559 1563 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1560 1564 # despite the docstring comment. For now, error on 1.4.0, warn on
1561 1565 # 1.5.0 but attempt to continue.
1562 1566 if version == -1:
1563 1567 return 'unknown'
1564 1568 if version < (1, 5, 0):
1565 1569 return 'abort'
1566 1570 elif version < (1, 6, 0):
1567 1571 return 'warning'
1568 1572 return 'ok'
1569 1573
1570 1574 def _gitcommand(self, commands, env=None, stream=False):
1571 1575 return self._gitdir(commands, env=env, stream=stream)[0]
1572 1576
1573 1577 def _gitdir(self, commands, env=None, stream=False):
1574 1578 return self._gitnodir(commands, env=env, stream=stream,
1575 1579 cwd=self._abspath)
1576 1580
1577 1581 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1578 1582 """Calls the git command
1579 1583
1580 1584 The methods tries to call the git command. versions prior to 1.6.0
1581 1585 are not supported and very probably fail.
1582 1586 """
1583 1587 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1584 1588 if env is None:
1585 1589 env = encoding.environ.copy()
1586 1590 # disable localization for Git output (issue5176)
1587 1591 env['LC_ALL'] = 'C'
1588 1592 # fix for Git CVE-2015-7545
1589 1593 if 'GIT_ALLOW_PROTOCOL' not in env:
1590 1594 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1591 1595 # unless ui.quiet is set, print git's stderr,
1592 1596 # which is mostly progress and useful info
1593 1597 errpipe = None
1594 1598 if self.ui.quiet:
1595 1599 errpipe = open(os.devnull, 'w')
1596 1600 if self.ui._colormode and len(commands) and commands[0] == "diff":
1597 1601 # insert the argument in the front,
1598 1602 # the end of git diff arguments is used for paths
1599 1603 commands.insert(1, '--color')
1600 1604 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1601 1605 cwd=cwd, env=env, close_fds=util.closefds,
1602 1606 stdout=subprocess.PIPE, stderr=errpipe)
1603 1607 if stream:
1604 1608 return p.stdout, None
1605 1609
1606 1610 retdata = p.stdout.read().strip()
1607 1611 # wait for the child to exit to avoid race condition.
1608 1612 p.wait()
1609 1613
1610 1614 if p.returncode != 0 and p.returncode != 1:
1611 1615 # there are certain error codes that are ok
1612 1616 command = commands[0]
1613 1617 if command in ('cat-file', 'symbolic-ref'):
1614 1618 return retdata, p.returncode
1615 1619 # for all others, abort
1616 1620 raise error.Abort(_('git %s error %d in %s') %
1617 1621 (command, p.returncode, self._relpath))
1618 1622
1619 1623 return retdata, p.returncode
1620 1624
1621 1625 def _gitmissing(self):
1622 1626 return not self.wvfs.exists('.git')
1623 1627
1624 1628 def _gitstate(self):
1625 1629 return self._gitcommand(['rev-parse', 'HEAD'])
1626 1630
1627 1631 def _gitcurrentbranch(self):
1628 1632 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1629 1633 if err:
1630 1634 current = None
1631 1635 return current
1632 1636
1633 1637 def _gitremote(self, remote):
1634 1638 out = self._gitcommand(['remote', 'show', '-n', remote])
1635 1639 line = out.split('\n')[1]
1636 1640 i = line.index('URL: ') + len('URL: ')
1637 1641 return line[i:]
1638 1642
1639 1643 def _githavelocally(self, revision):
1640 1644 out, code = self._gitdir(['cat-file', '-e', revision])
1641 1645 return code == 0
1642 1646
1643 1647 def _gitisancestor(self, r1, r2):
1644 1648 base = self._gitcommand(['merge-base', r1, r2])
1645 1649 return base == r1
1646 1650
1647 1651 def _gitisbare(self):
1648 1652 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1649 1653
1650 1654 def _gitupdatestat(self):
1651 1655 """This must be run before git diff-index.
1652 1656 diff-index only looks at changes to file stat;
1653 1657 this command looks at file contents and updates the stat."""
1654 1658 self._gitcommand(['update-index', '-q', '--refresh'])
1655 1659
1656 1660 def _gitbranchmap(self):
1657 1661 '''returns 2 things:
1658 1662 a map from git branch to revision
1659 1663 a map from revision to branches'''
1660 1664 branch2rev = {}
1661 1665 rev2branch = {}
1662 1666
1663 1667 out = self._gitcommand(['for-each-ref', '--format',
1664 1668 '%(objectname) %(refname)'])
1665 1669 for line in out.split('\n'):
1666 1670 revision, ref = line.split(' ')
1667 1671 if (not ref.startswith('refs/heads/') and
1668 1672 not ref.startswith('refs/remotes/')):
1669 1673 continue
1670 1674 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1671 1675 continue # ignore remote/HEAD redirects
1672 1676 branch2rev[ref] = revision
1673 1677 rev2branch.setdefault(revision, []).append(ref)
1674 1678 return branch2rev, rev2branch
1675 1679
1676 1680 def _gittracking(self, branches):
1677 1681 'return map of remote branch to local tracking branch'
1678 1682 # assumes no more than one local tracking branch for each remote
1679 1683 tracking = {}
1680 1684 for b in branches:
1681 1685 if b.startswith('refs/remotes/'):
1682 1686 continue
1683 1687 bname = b.split('/', 2)[2]
1684 1688 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1685 1689 if remote:
1686 1690 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1687 1691 tracking['refs/remotes/%s/%s' %
1688 1692 (remote, ref.split('/', 2)[2])] = b
1689 1693 return tracking
1690 1694
1691 1695 def _abssource(self, source):
1692 1696 if '://' not in source:
1693 1697 # recognize the scp syntax as an absolute source
1694 1698 colon = source.find(':')
1695 1699 if colon != -1 and '/' not in source[:colon]:
1696 1700 return source
1697 1701 self._subsource = source
1698 1702 return _abssource(self)
1699 1703
1700 1704 def _fetch(self, source, revision):
1701 1705 if self._gitmissing():
1702 1706 # SEC: check for safe ssh url
1703 1707 util.checksafessh(source)
1704 1708
1705 1709 source = self._abssource(source)
1706 1710 self.ui.status(_('cloning subrepo %s from %s\n') %
1707 1711 (self._relpath, source))
1708 1712 self._gitnodir(['clone', source, self._abspath])
1709 1713 if self._githavelocally(revision):
1710 1714 return
1711 1715 self.ui.status(_('pulling subrepo %s from %s\n') %
1712 1716 (self._relpath, self._gitremote('origin')))
1713 1717 # try only origin: the originally cloned repo
1714 1718 self._gitcommand(['fetch'])
1715 1719 if not self._githavelocally(revision):
1716 1720 raise error.Abort(_('revision %s does not exist in subrepository '
1717 1721 '"%s"\n') % (revision, self._relpath))
1718 1722
1719 1723 @annotatesubrepoerror
1720 1724 def dirty(self, ignoreupdate=False, missing=False):
1721 1725 if self._gitmissing():
1722 1726 return self._state[1] != ''
1723 1727 if self._gitisbare():
1724 1728 return True
1725 1729 if not ignoreupdate and self._state[1] != self._gitstate():
1726 1730 # different version checked out
1727 1731 return True
1728 1732 # check for staged changes or modified files; ignore untracked files
1729 1733 self._gitupdatestat()
1730 1734 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1731 1735 return code == 1
1732 1736
1733 1737 def basestate(self):
1734 1738 return self._gitstate()
1735 1739
1736 1740 @annotatesubrepoerror
1737 1741 def get(self, state, overwrite=False):
1738 1742 source, revision, kind = state
1739 1743 if not revision:
1740 1744 self.remove()
1741 1745 return
1742 1746 self._fetch(source, revision)
1743 1747 # if the repo was set to be bare, unbare it
1744 1748 if self._gitisbare():
1745 1749 self._gitcommand(['config', 'core.bare', 'false'])
1746 1750 if self._gitstate() == revision:
1747 1751 self._gitcommand(['reset', '--hard', 'HEAD'])
1748 1752 return
1749 1753 elif self._gitstate() == revision:
1750 1754 if overwrite:
1751 1755 # first reset the index to unmark new files for commit, because
1752 1756 # reset --hard will otherwise throw away files added for commit,
1753 1757 # not just unmark them.
1754 1758 self._gitcommand(['reset', 'HEAD'])
1755 1759 self._gitcommand(['reset', '--hard', 'HEAD'])
1756 1760 return
1757 1761 branch2rev, rev2branch = self._gitbranchmap()
1758 1762
1759 1763 def checkout(args):
1760 1764 cmd = ['checkout']
1761 1765 if overwrite:
1762 1766 # first reset the index to unmark new files for commit, because
1763 1767 # the -f option will otherwise throw away files added for
1764 1768 # commit, not just unmark them.
1765 1769 self._gitcommand(['reset', 'HEAD'])
1766 1770 cmd.append('-f')
1767 1771 self._gitcommand(cmd + args)
1768 1772 _sanitize(self.ui, self.wvfs, '.git')
1769 1773
1770 1774 def rawcheckout():
1771 1775 # no branch to checkout, check it out with no branch
1772 1776 self.ui.warn(_('checking out detached HEAD in '
1773 1777 'subrepository "%s"\n') % self._relpath)
1774 1778 self.ui.warn(_('check out a git branch if you intend '
1775 1779 'to make changes\n'))
1776 1780 checkout(['-q', revision])
1777 1781
1778 1782 if revision not in rev2branch:
1779 1783 rawcheckout()
1780 1784 return
1781 1785 branches = rev2branch[revision]
1782 1786 firstlocalbranch = None
1783 1787 for b in branches:
1784 1788 if b == 'refs/heads/master':
1785 1789 # master trumps all other branches
1786 1790 checkout(['refs/heads/master'])
1787 1791 return
1788 1792 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1789 1793 firstlocalbranch = b
1790 1794 if firstlocalbranch:
1791 1795 checkout([firstlocalbranch])
1792 1796 return
1793 1797
1794 1798 tracking = self._gittracking(branch2rev.keys())
1795 1799 # choose a remote branch already tracked if possible
1796 1800 remote = branches[0]
1797 1801 if remote not in tracking:
1798 1802 for b in branches:
1799 1803 if b in tracking:
1800 1804 remote = b
1801 1805 break
1802 1806
1803 1807 if remote not in tracking:
1804 1808 # create a new local tracking branch
1805 1809 local = remote.split('/', 3)[3]
1806 1810 checkout(['-b', local, remote])
1807 1811 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1808 1812 # When updating to a tracked remote branch,
1809 1813 # if the local tracking branch is downstream of it,
1810 1814 # a normal `git pull` would have performed a "fast-forward merge"
1811 1815 # which is equivalent to updating the local branch to the remote.
1812 1816 # Since we are only looking at branching at update, we need to
1813 1817 # detect this situation and perform this action lazily.
1814 1818 if tracking[remote] != self._gitcurrentbranch():
1815 1819 checkout([tracking[remote]])
1816 1820 self._gitcommand(['merge', '--ff', remote])
1817 1821 _sanitize(self.ui, self.wvfs, '.git')
1818 1822 else:
1819 1823 # a real merge would be required, just checkout the revision
1820 1824 rawcheckout()
1821 1825
1822 1826 @annotatesubrepoerror
1823 1827 def commit(self, text, user, date):
1824 1828 if self._gitmissing():
1825 1829 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1826 1830 cmd = ['commit', '-a', '-m', text]
1827 1831 env = encoding.environ.copy()
1828 1832 if user:
1829 1833 cmd += ['--author', user]
1830 1834 if date:
1831 1835 # git's date parser silently ignores when seconds < 1e9
1832 1836 # convert to ISO8601
1833 1837 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1834 1838 '%Y-%m-%dT%H:%M:%S %1%2')
1835 1839 self._gitcommand(cmd, env=env)
1836 1840 # make sure commit works otherwise HEAD might not exist under certain
1837 1841 # circumstances
1838 1842 return self._gitstate()
1839 1843
1840 1844 @annotatesubrepoerror
1841 1845 def merge(self, state):
1842 1846 source, revision, kind = state
1843 1847 self._fetch(source, revision)
1844 1848 base = self._gitcommand(['merge-base', revision, self._state[1]])
1845 1849 self._gitupdatestat()
1846 1850 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1847 1851
1848 1852 def mergefunc():
1849 1853 if base == revision:
1850 1854 self.get(state) # fast forward merge
1851 1855 elif base != self._state[1]:
1852 1856 self._gitcommand(['merge', '--no-commit', revision])
1853 1857 _sanitize(self.ui, self.wvfs, '.git')
1854 1858
1855 1859 if self.dirty():
1856 1860 if self._gitstate() != revision:
1857 1861 dirty = self._gitstate() == self._state[1] or code != 0
1858 1862 if _updateprompt(self.ui, self, dirty,
1859 1863 self._state[1][:7], revision[:7]):
1860 1864 mergefunc()
1861 1865 else:
1862 1866 mergefunc()
1863 1867
1864 1868 @annotatesubrepoerror
1865 1869 def push(self, opts):
1866 1870 force = opts.get('force')
1867 1871
1868 1872 if not self._state[1]:
1869 1873 return True
1870 1874 if self._gitmissing():
1871 1875 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1872 1876 # if a branch in origin contains the revision, nothing to do
1873 1877 branch2rev, rev2branch = self._gitbranchmap()
1874 1878 if self._state[1] in rev2branch:
1875 1879 for b in rev2branch[self._state[1]]:
1876 1880 if b.startswith('refs/remotes/origin/'):
1877 1881 return True
1878 1882 for b, revision in branch2rev.iteritems():
1879 1883 if b.startswith('refs/remotes/origin/'):
1880 1884 if self._gitisancestor(self._state[1], revision):
1881 1885 return True
1882 1886 # otherwise, try to push the currently checked out branch
1883 1887 cmd = ['push']
1884 1888 if force:
1885 1889 cmd.append('--force')
1886 1890
1887 1891 current = self._gitcurrentbranch()
1888 1892 if current:
1889 1893 # determine if the current branch is even useful
1890 1894 if not self._gitisancestor(self._state[1], current):
1891 1895 self.ui.warn(_('unrelated git branch checked out '
1892 1896 'in subrepository "%s"\n') % self._relpath)
1893 1897 return False
1894 1898 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1895 1899 (current.split('/', 2)[2], self._relpath))
1896 1900 ret = self._gitdir(cmd + ['origin', current])
1897 1901 return ret[1] == 0
1898 1902 else:
1899 1903 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1900 1904 'cannot push revision %s\n') %
1901 1905 (self._relpath, self._state[1]))
1902 1906 return False
1903 1907
1904 1908 @annotatesubrepoerror
1905 1909 def add(self, ui, match, prefix, explicitonly, **opts):
1906 1910 if self._gitmissing():
1907 1911 return []
1908 1912
1909 1913 (modified, added, removed,
1910 1914 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1911 1915 clean=True)
1912 1916
1913 1917 tracked = set()
1914 1918 # dirstates 'amn' warn, 'r' is added again
1915 1919 for l in (modified, added, deleted, clean):
1916 1920 tracked.update(l)
1917 1921
1918 1922 # Unknown files not of interest will be rejected by the matcher
1919 1923 files = unknown
1920 1924 files.extend(match.files())
1921 1925
1922 1926 rejected = []
1923 1927
1924 1928 files = [f for f in sorted(set(files)) if match(f)]
1925 1929 for f in files:
1926 1930 exact = match.exact(f)
1927 1931 command = ["add"]
1928 1932 if exact:
1929 1933 command.append("-f") #should be added, even if ignored
1930 1934 if ui.verbose or not exact:
1931 1935 ui.status(_('adding %s\n') % match.rel(f))
1932 1936
1933 1937 if f in tracked: # hg prints 'adding' even if already tracked
1934 1938 if exact:
1935 1939 rejected.append(f)
1936 1940 continue
1937 1941 if not opts.get(r'dry_run'):
1938 1942 self._gitcommand(command + [f])
1939 1943
1940 1944 for f in rejected:
1941 1945 ui.warn(_("%s already tracked!\n") % match.abs(f))
1942 1946
1943 1947 return rejected
1944 1948
1945 1949 @annotatesubrepoerror
1946 1950 def remove(self):
1947 1951 if self._gitmissing():
1948 1952 return
1949 1953 if self.dirty():
1950 1954 self.ui.warn(_('not removing repo %s because '
1951 1955 'it has changes.\n') % self._relpath)
1952 1956 return
1953 1957 # we can't fully delete the repository as it may contain
1954 1958 # local-only history
1955 1959 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1956 1960 self._gitcommand(['config', 'core.bare', 'true'])
1957 1961 for f, kind in self.wvfs.readdir():
1958 1962 if f == '.git':
1959 1963 continue
1960 1964 if kind == stat.S_IFDIR:
1961 1965 self.wvfs.rmtree(f)
1962 1966 else:
1963 1967 self.wvfs.unlink(f)
1964 1968
1965 1969 def archive(self, archiver, prefix, match=None, decode=True):
1966 1970 total = 0
1967 1971 source, revision = self._state
1968 1972 if not revision:
1969 1973 return total
1970 1974 self._fetch(source, revision)
1971 1975
1972 1976 # Parse git's native archive command.
1973 1977 # This should be much faster than manually traversing the trees
1974 1978 # and objects with many subprocess calls.
1975 1979 tarstream = self._gitcommand(['archive', revision], stream=True)
1976 1980 tar = tarfile.open(fileobj=tarstream, mode='r|')
1977 1981 relpath = subrelpath(self)
1978 1982 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1979 1983 for i, info in enumerate(tar):
1980 1984 if info.isdir():
1981 1985 continue
1982 1986 if match and not match(info.name):
1983 1987 continue
1984 1988 if info.issym():
1985 1989 data = info.linkname
1986 1990 else:
1987 1991 data = tar.extractfile(info).read()
1988 1992 archiver.addfile(prefix + self._path + '/' + info.name,
1989 1993 info.mode, info.issym(), data)
1990 1994 total += 1
1991 1995 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1992 1996 unit=_('files'))
1993 1997 self.ui.progress(_('archiving (%s)') % relpath, None)
1994 1998 return total
1995 1999
1996 2000
1997 2001 @annotatesubrepoerror
1998 2002 def cat(self, match, fm, fntemplate, prefix, **opts):
1999 2003 rev = self._state[1]
2000 2004 if match.anypats():
2001 2005 return 1 #No support for include/exclude yet
2002 2006
2003 2007 if not match.files():
2004 2008 return 1
2005 2009
2006 2010 # TODO: add support for non-plain formatter (see cmdutil.cat())
2007 2011 for f in match.files():
2008 2012 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
2009 2013 fp = cmdutil.makefileobj(self._subparent, fntemplate,
2010 2014 self._ctx.node(),
2011 2015 pathname=self.wvfs.reljoin(prefix, f))
2012 2016 fp.write(output)
2013 2017 fp.close()
2014 2018 return 0
2015 2019
2016 2020
2017 2021 @annotatesubrepoerror
2018 2022 def status(self, rev2, **opts):
2019 2023 rev1 = self._state[1]
2020 2024 if self._gitmissing() or not rev1:
2021 2025 # if the repo is missing, return no results
2022 2026 return scmutil.status([], [], [], [], [], [], [])
2023 2027 modified, added, removed = [], [], []
2024 2028 self._gitupdatestat()
2025 2029 if rev2:
2026 2030 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
2027 2031 else:
2028 2032 command = ['diff-index', '--no-renames', rev1]
2029 2033 out = self._gitcommand(command)
2030 2034 for line in out.split('\n'):
2031 2035 tab = line.find('\t')
2032 2036 if tab == -1:
2033 2037 continue
2034 2038 status, f = line[tab - 1], line[tab + 1:]
2035 2039 if status == 'M':
2036 2040 modified.append(f)
2037 2041 elif status == 'A':
2038 2042 added.append(f)
2039 2043 elif status == 'D':
2040 2044 removed.append(f)
2041 2045
2042 2046 deleted, unknown, ignored, clean = [], [], [], []
2043 2047
2044 2048 command = ['status', '--porcelain', '-z']
2045 2049 if opts.get(r'unknown'):
2046 2050 command += ['--untracked-files=all']
2047 2051 if opts.get(r'ignored'):
2048 2052 command += ['--ignored']
2049 2053 out = self._gitcommand(command)
2050 2054
2051 2055 changedfiles = set()
2052 2056 changedfiles.update(modified)
2053 2057 changedfiles.update(added)
2054 2058 changedfiles.update(removed)
2055 2059 for line in out.split('\0'):
2056 2060 if not line:
2057 2061 continue
2058 2062 st = line[0:2]
2059 2063 #moves and copies show 2 files on one line
2060 2064 if line.find('\0') >= 0:
2061 2065 filename1, filename2 = line[3:].split('\0')
2062 2066 else:
2063 2067 filename1 = line[3:]
2064 2068 filename2 = None
2065 2069
2066 2070 changedfiles.add(filename1)
2067 2071 if filename2:
2068 2072 changedfiles.add(filename2)
2069 2073
2070 2074 if st == '??':
2071 2075 unknown.append(filename1)
2072 2076 elif st == '!!':
2073 2077 ignored.append(filename1)
2074 2078
2075 2079 if opts.get(r'clean'):
2076 2080 out = self._gitcommand(['ls-files'])
2077 2081 for f in out.split('\n'):
2078 2082 if not f in changedfiles:
2079 2083 clean.append(f)
2080 2084
2081 2085 return scmutil.status(modified, added, removed, deleted,
2082 2086 unknown, ignored, clean)
2083 2087
2084 2088 @annotatesubrepoerror
2085 2089 def diff(self, ui, diffopts, node2, match, prefix, **opts):
2086 2090 node1 = self._state[1]
2087 2091 cmd = ['diff', '--no-renames']
2088 2092 if opts[r'stat']:
2089 2093 cmd.append('--stat')
2090 2094 else:
2091 2095 # for Git, this also implies '-p'
2092 2096 cmd.append('-U%d' % diffopts.context)
2093 2097
2094 2098 gitprefix = self.wvfs.reljoin(prefix, self._path)
2095 2099
2096 2100 if diffopts.noprefix:
2097 2101 cmd.extend(['--src-prefix=%s/' % gitprefix,
2098 2102 '--dst-prefix=%s/' % gitprefix])
2099 2103 else:
2100 2104 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
2101 2105 '--dst-prefix=b/%s/' % gitprefix])
2102 2106
2103 2107 if diffopts.ignorews:
2104 2108 cmd.append('--ignore-all-space')
2105 2109 if diffopts.ignorewsamount:
2106 2110 cmd.append('--ignore-space-change')
2107 2111 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
2108 2112 and diffopts.ignoreblanklines:
2109 2113 cmd.append('--ignore-blank-lines')
2110 2114
2111 2115 cmd.append(node1)
2112 2116 if node2:
2113 2117 cmd.append(node2)
2114 2118
2115 2119 output = ""
2116 2120 if match.always():
2117 2121 output += self._gitcommand(cmd) + '\n'
2118 2122 else:
2119 2123 st = self.status(node2)[:3]
2120 2124 files = [f for sublist in st for f in sublist]
2121 2125 for f in files:
2122 2126 if match(f):
2123 2127 output += self._gitcommand(cmd + ['--', f]) + '\n'
2124 2128
2125 2129 if output.strip():
2126 2130 ui.write(output)
2127 2131
2128 2132 @annotatesubrepoerror
2129 2133 def revert(self, substate, *pats, **opts):
2130 2134 self.ui.status(_('reverting subrepo %s\n') % substate[0])
2131 2135 if not opts.get(r'no_backup'):
2132 2136 status = self.status(None)
2133 2137 names = status.modified
2134 2138 for name in names:
2135 2139 bakname = scmutil.origpath(self.ui, self._subparent, name)
2136 2140 self.ui.note(_('saving current version of %s as %s\n') %
2137 2141 (name, bakname))
2138 2142 self.wvfs.rename(name, bakname)
2139 2143
2140 2144 if not opts.get(r'dry_run'):
2141 2145 self.get(substate, overwrite=True)
2142 2146 return []
2143 2147
2144 2148 def shortid(self, revid):
2145 2149 return revid[:7]
2146 2150
2147 2151 types = {
2148 2152 'hg': hgsubrepo,
2149 2153 'svn': svnsubrepo,
2150 2154 'git': gitsubrepo,
2151 2155 }
@@ -1,231 +1,259 b''
1 1 #require lfs-test-server
2 2
3 3 $ LFS_LISTEN="tcp://:$HGPORT"
4 4 $ LFS_HOST="localhost:$HGPORT"
5 5 $ LFS_PUBLIC=1
6 6 $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
7 7 #if no-windows
8 8 $ lfs-test-server &> lfs-server.log &
9 9 $ echo $! >> $DAEMON_PIDS
10 10 #else
11 11 $ cat >> $TESTTMP/spawn.py <<EOF
12 12 > import os
13 13 > import subprocess
14 14 > import sys
15 15 >
16 16 > for path in os.environ["PATH"].split(os.pathsep):
17 17 > exe = os.path.join(path, 'lfs-test-server.exe')
18 18 > if os.path.exists(exe):
19 19 > with open('lfs-server.log', 'wb') as out:
20 20 > p = subprocess.Popen(exe, stdout=out, stderr=out)
21 21 > sys.stdout.write('%s\n' % p.pid)
22 22 > sys.exit(0)
23 23 > sys.exit(1)
24 24 > EOF
25 25 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
26 26 #endif
27 27
28 28 $ cat >> $HGRCPATH <<EOF
29 29 > [extensions]
30 30 > lfs=
31 31 > [lfs]
32 32 > url=http://foo:bar@$LFS_HOST/
33 33 > track=all()
34 34 > EOF
35 35
36 36 $ hg init repo1
37 37 $ cd repo1
38 38 $ echo THIS-IS-LFS > a
39 39 $ hg commit -m a -A a
40 40
41 41 A push can be serviced directly from the usercache if it isn't in the local
42 42 store.
43 43
44 44 $ hg init ../repo2
45 45 $ mv .hg/store/lfs .hg/store/lfs_
46 46 $ hg push ../repo2 -v
47 47 pushing to ../repo2
48 48 searching for changes
49 49 lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
50 50 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
51 51 lfs: uploaded 1 files (12 bytes)
52 52 1 changesets found
53 53 uncompressed size of bundle content:
54 54 * (changelog) (glob)
55 55 * (manifests) (glob)
56 56 * a (glob)
57 57 adding changesets
58 58 adding manifests
59 59 adding file changes
60 60 added 1 changesets with 1 changes to 1 files
61 61 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
62 62 $ mv .hg/store/lfs_ .hg/store/lfs
63 63
64 64 Clear the cache to force a download
65 65 $ rm -rf `hg config lfs.usercache`
66 66 $ cd ../repo2
67 67 $ hg update tip -v
68 68 resolving manifests
69 69 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
70 70 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
71 71 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
72 72 getting a
73 73 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
74 74 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
75 75
76 76 When the server has some blobs already
77 77
78 78 $ hg mv a b
79 79 $ echo ANOTHER-LARGE-FILE > c
80 80 $ echo ANOTHER-LARGE-FILE2 > d
81 81 $ hg commit -m b-and-c -A b c d
82 82 $ hg push ../repo1 -v | grep -v '^ '
83 83 pushing to ../repo1
84 84 searching for changes
85 85 lfs: need to transfer 2 objects (39 bytes)
86 86 lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
87 87 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
88 88 lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
89 89 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
90 90 lfs: uploaded 2 files (39 bytes)
91 91 1 changesets found
92 92 uncompressed size of bundle content:
93 93 adding changesets
94 94 adding manifests
95 95 adding file changes
96 96 added 1 changesets with 3 changes to 3 files
97 97
98 98 Clear the cache to force a download
99 99 $ rm -rf `hg config lfs.usercache`
100 100 $ hg --repo ../repo1 update tip -v
101 101 resolving manifests
102 102 lfs: need to transfer 2 objects (39 bytes)
103 103 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
104 104 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
105 105 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
106 106 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
107 107 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
108 108 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
109 109 getting b
110 110 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
111 111 getting c
112 112 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
113 113 getting d
114 114 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
115 115 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
116 116
117 117 Test a corrupt file download, but clear the cache first to force a download.
118 118
119 119 $ rm -rf `hg config lfs.usercache`
120 120 $ cp $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 blob
121 121 $ echo 'damage' > $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
122 122 $ rm ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
123 123 $ rm ../repo1/*
124 124
125 125 $ hg --repo ../repo1 update -C tip -v
126 126 resolving manifests
127 127 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
128 128 abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
129 129 [255]
130 130
131 131 The corrupted blob is not added to the usercache or local store
132 132
133 133 $ test -f ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
134 134 [1]
135 135 $ test -f `hg config lfs.usercache`/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
136 136 [1]
137 137 $ cp blob $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
138 138
139 139 Test a corrupted file upload
140 140
141 141 $ echo 'another lfs blob' > b
142 142 $ hg ci -m 'another blob'
143 143 $ echo 'damage' > .hg/store/lfs/objects/e6/59058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
144 144 $ hg push -v ../repo1
145 145 pushing to ../repo1
146 146 searching for changes
147 147 lfs: uploading e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0 (17 bytes)
148 148 abort: detected corrupt lfs object: e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
149 149 (run hg verify)
150 150 [255]
151 151
152 Archive will prefetch blobs in a group
153
154 $ rm -rf .hg/store/lfs `hg config lfs.usercache`
155 $ hg archive -vr 1 ../archive
156 lfs: need to transfer 4 objects (63 bytes)
157 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
158 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
159 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
160 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
161 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
162 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
163 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
164 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
165 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
166 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
167 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
168 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
169 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
170 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
171 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
172 $ find ../archive | sort
173 ../archive
174 ../archive/.hg_archival.txt
175 ../archive/a
176 ../archive/b
177 ../archive/c
178 ../archive/d
179
152 180 Revert will prefetch blobs in a group
153 181
154 182 $ rm -rf .hg/store/lfs
155 183 $ rm -rf `hg config lfs.usercache`
156 184 $ rm *
157 185 $ hg revert --all -r 1 -v
158 186 adding a
159 187 reverting b
160 188 reverting c
161 189 reverting d
162 190 lfs: need to transfer 4 objects (63 bytes)
163 191 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
164 192 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
165 193 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
166 194 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
167 195 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
168 196 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
169 197 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
170 198 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
171 199 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
172 200 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
173 201 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
174 202 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
175 203 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
176 204 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
177 205 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
178 206
179 207 Check error message when the remote missed a blob:
180 208
181 209 $ echo FFFFF > b
182 210 $ hg commit -m b -A b
183 211 $ echo FFFFF >> b
184 212 $ hg commit -m b b
185 213 $ rm -rf .hg/store/lfs
186 214 $ rm -rf `hg config lfs.usercache`
187 215 $ hg update -C '.^'
188 216 abort: LFS server error. Remote object for "b" not found:(.*)! (re)
189 217 [255]
190 218
191 219 Check error message when object does not exist:
192 220
193 221 $ cd $TESTTMP
194 222 $ hg init test && cd test
195 223 $ echo "[extensions]" >> .hg/hgrc
196 224 $ echo "lfs=" >> .hg/hgrc
197 225 $ echo "[lfs]" >> .hg/hgrc
198 226 $ echo "threshold=1" >> .hg/hgrc
199 227 $ echo a > a
200 228 $ hg add a
201 229 $ hg commit -m 'test'
202 230 $ echo aaaaa > a
203 231 $ hg commit -m 'largefile'
204 232 $ hg debugdata .hg/store/data/a.i 1 # verify this is not the file content but includes "oid", the LFS "pointer".
205 233 version https://git-lfs.github.com/spec/v1
206 234 oid sha256:bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a
207 235 size 6
208 236 x-is-binary 0
209 237 $ cd ..
210 238 $ rm -rf `hg config lfs.usercache`
211 239
212 240 (Restart the server in a different location so it no longer has the content)
213 241
214 242 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
215 243 $ rm $DAEMON_PIDS
216 244 $ mkdir $TESTTMP/lfs-server2
217 245 $ cd $TESTTMP/lfs-server2
218 246 #if no-windows
219 247 $ lfs-test-server &> lfs-server.log &
220 248 $ echo $! >> $DAEMON_PIDS
221 249 #else
222 250 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
223 251 #endif
224 252
225 253 $ cd $TESTTMP
226 254 $ hg clone test test2
227 255 updating to branch default
228 256 abort: LFS server error. Remote object for "a" not found:(.*)! (re)
229 257 [255]
230 258
231 259 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
General Comments 0
You need to be logged in to leave comments. Login now