archive: support 'wdir()'...
Matt Harbison
r25601:3ec8351f default
@@ -1,1373 +1,1385 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10
11 11 import os
12 12 import copy
13 13
14 14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 15 archival, pathutil, revset
16 16 from mercurial.i18n import _
17 17
18 18 import lfutil
19 19 import lfcommands
20 20 import basestore
21 21
22 22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23 23
24 24 def composelargefilematcher(match, manifest):
25 25 '''create a matcher that matches only the largefiles in the original
26 26 matcher'''
27 27 m = copy.copy(match)
28 28 lfile = lambda f: lfutil.standin(f) in manifest
29 29 m._files = filter(lfile, m._files)
30 30 m._fileroots = set(m._files)
31 31 m._always = False
32 32 origmatchfn = m.matchfn
33 33 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
34 34 return m
35 35
36 36 def composenormalfilematcher(match, manifest, exclude=None):
37 37 excluded = set()
38 38 if exclude is not None:
39 39 excluded.update(exclude)
40 40
41 41 m = copy.copy(match)
42 42 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
43 43 manifest or f in excluded)
44 44 m._files = filter(notlfile, m._files)
45 45 m._fileroots = set(m._files)
46 46 m._always = False
47 47 origmatchfn = m.matchfn
48 48 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
49 49 return m
50 50
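As a rough, self-contained illustration of what the two helpers above select (assuming the usual '.hglf/' standin prefix; nothing below imports Mercurial and all names are made up for the sketch):

    manifest = {'.hglf/big.bin', 'README'}            # toy manifest
    standin = lambda f: '.hglf/' + f
    isstandin = lambda f: f.startswith('.hglf/')

    # composelargefilematcher keeps only files whose standin is tracked
    islargefile = lambda f: standin(f) in manifest
    # composenormalfilematcher drops standins and files that have a standin
    isnormal = lambda f: not (isstandin(f) or standin(f) in manifest)

    assert islargefile('big.bin') and not islargefile('README')
    assert isnormal('README')
    assert not isnormal('big.bin') and not isnormal('.hglf/big.bin')
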
51 51 def installnormalfilesmatchfn(manifest):
52 52 '''installmatchfn with a matchfn that ignores all largefiles'''
53 53 def overridematch(ctx, pats=[], opts={}, globbed=False,
54 54 default='relpath', badfn=None):
55 55 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
56 56 return composenormalfilematcher(match, manifest)
57 57 oldmatch = installmatchfn(overridematch)
58 58
59 59 def installmatchfn(f):
60 60 '''monkey patch the scmutil module with a custom match function.
61 61 Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
62 62 oldmatch = scmutil.match
63 63 setattr(f, 'oldmatch', oldmatch)
64 64 scmutil.match = f
65 65 return oldmatch
66 66
67 67 def restorematchfn():
68 68 '''restores scmutil.match to what it was before installmatchfn
69 69 was called. no-op if scmutil.match is its original function.
70 70
71 71 Note that n calls to installmatchfn will require n calls to
72 72 restore the original matchfn.'''
73 73 scmutil.match = getattr(scmutil.match, 'oldmatch')
74 74
75 75 def installmatchandpatsfn(f):
76 76 oldmatchandpats = scmutil.matchandpats
77 77 setattr(f, 'oldmatchandpats', oldmatchandpats)
78 78 scmutil.matchandpats = f
79 79 return oldmatchandpats
80 80
81 81 def restorematchandpatsfn():
82 82 '''restores scmutil.matchandpats to what it was before
83 83 installmatchandpatsfn was called. No-op if scmutil.matchandpats
84 84 is its original function.
85 85
86 86 Note that n calls to installmatchandpatsfn will require n calls
87 87 to restore the original matchfn.'''
88 88 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
89 89 scmutil.matchandpats)
90 90
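A minimal, Mercurial-free sketch of that install/restore pattern (fakescmutil and nolargefiles are invented for the example): every install stashes the function it replaced on the wrapper itself, so n installs must be undone by n restores:

    class _ns(object):
        pass

    fakescmutil = _ns()                    # stands in for the scmutil module
    fakescmutil.match = lambda f: True     # the "original" match function

    def installmatchfn(f):
        oldmatch = fakescmutil.match
        setattr(f, 'oldmatch', oldmatch)   # remember what was replaced
        fakescmutil.match = f
        return oldmatch

    def restorematchfn():
        fakescmutil.match = getattr(fakescmutil.match, 'oldmatch')

    def nolargefiles(f):                   # an example override
        return not f.endswith('.iso') and nolargefiles.oldmatch(f)

    installmatchfn(nolargefiles)
    assert fakescmutil.match('a.txt') and not fakescmutil.match('b.iso')
    restorematchfn()
    assert fakescmutil.match('b.iso')      # original behaviour is back
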
91 91 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
92 92 large = opts.get('large')
93 93 lfsize = lfutil.getminsize(
94 94 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
95 95
96 96 lfmatcher = None
97 97 if lfutil.islfilesrepo(repo):
98 98 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
99 99 if lfpats:
100 100 lfmatcher = match_.match(repo.root, '', list(lfpats))
101 101
102 102 lfnames = []
103 103 m = matcher
104 104
105 105 wctx = repo[None]
106 106 for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
107 107 exact = m.exact(f)
108 108 lfile = lfutil.standin(f) in wctx
109 109 nfile = f in wctx
110 110 exists = lfile or nfile
111 111
112 112 # addremove in core gets fancy with the name, add doesn't
113 113 if isaddremove:
114 114 name = m.uipath(f)
115 115 else:
116 116 name = m.rel(f)
117 117
118 118 # Don't warn the user when they attempt to add a normal tracked file.
119 119 # The normal add code will do that for us.
120 120 if exact and exists:
121 121 if lfile:
122 122 ui.warn(_('%s already a largefile\n') % name)
123 123 continue
124 124
125 125 if (exact or not exists) and not lfutil.isstandin(f):
126 126 # In case the file was removed previously, but not committed
127 127 # (issue3507)
128 128 if not repo.wvfs.exists(f):
129 129 continue
130 130
131 131 abovemin = (lfsize and
132 132 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
133 133 if large or abovemin or (lfmatcher and lfmatcher(f)):
134 134 lfnames.append(f)
135 135 if ui.verbose or not exact:
136 136 ui.status(_('adding %s as a largefile\n') % name)
137 137
138 138 bad = []
139 139
140 140 # Need to lock, otherwise there could be a race condition between
141 141 # when standins are created and added to the repo.
142 142 wlock = repo.wlock()
143 143 try:
144 144 if not opts.get('dry_run'):
145 145 standins = []
146 146 lfdirstate = lfutil.openlfdirstate(ui, repo)
147 147 for f in lfnames:
148 148 standinname = lfutil.standin(f)
149 149 lfutil.writestandin(repo, standinname, hash='',
150 150 executable=lfutil.getexecutable(repo.wjoin(f)))
151 151 standins.append(standinname)
152 152 if lfdirstate[f] == 'r':
153 153 lfdirstate.normallookup(f)
154 154 else:
155 155 lfdirstate.add(f)
156 156 lfdirstate.write()
157 157 bad += [lfutil.splitstandin(f)
158 158 for f in repo[None].add(standins)
159 159 if f in m.files()]
160 160
161 161 added = [f for f in lfnames if f not in bad]
162 162 finally:
163 163 wlock.release()
164 164 return added, bad
165 165
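Hedged sketch of the classification rule above (wantlargefile is a made-up helper; lfsize is taken in mebibytes, matching the lfsize * 1024 * 1024 comparison, and 10 is only an example threshold): a file becomes a largefile when --large was passed, when it reaches the size threshold, or when it matches a configured largefiles pattern:

    def wantlargefile(f, size, large=False, lfsize=10, lfmatcher=None):
        abovemin = bool(lfsize and size >= lfsize * 1024 * 1024)
        return large or abovemin or bool(lfmatcher and lfmatcher(f))

    assert wantlargefile('media/video.iso', 50 * 1024 * 1024)   # 50 MiB >= 10 MiB
    assert not wantlargefile('README', 4 * 1024)                # small normal file
    assert wantlargefile('small.bin', 10, large=True)           # forced via --large
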
166 166 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
167 167 after = opts.get('after')
168 168 m = composelargefilematcher(matcher, repo[None].manifest())
169 169 try:
170 170 repo.lfstatus = True
171 171 s = repo.status(match=m, clean=not isaddremove)
172 172 finally:
173 173 repo.lfstatus = False
174 174 manifest = repo[None].manifest()
175 175 modified, added, deleted, clean = [[f for f in list
176 176 if lfutil.standin(f) in manifest]
177 177 for list in (s.modified, s.added,
178 178 s.deleted, s.clean)]
179 179
180 180 def warn(files, msg):
181 181 for f in files:
182 182 ui.warn(msg % m.rel(f))
183 183 return int(len(files) > 0)
184 184
185 185 result = 0
186 186
187 187 if after:
188 188 remove = deleted
189 189 result = warn(modified + added + clean,
190 190 _('not removing %s: file still exists\n'))
191 191 else:
192 192 remove = deleted + clean
193 193 result = warn(modified, _('not removing %s: file is modified (use -f'
194 194 ' to force removal)\n'))
195 195 result = warn(added, _('not removing %s: file has been marked for add'
196 196 ' (use forget to undo)\n')) or result
197 197
198 198 # Need to lock because standin files are deleted then removed from the
199 199 # repository and we could race in-between.
200 200 wlock = repo.wlock()
201 201 try:
202 202 lfdirstate = lfutil.openlfdirstate(ui, repo)
203 203 for f in sorted(remove):
204 204 if ui.verbose or not m.exact(f):
205 205 # addremove in core gets fancy with the name, remove doesn't
206 206 if isaddremove:
207 207 name = m.uipath(f)
208 208 else:
209 209 name = m.rel(f)
210 210 ui.status(_('removing %s\n') % name)
211 211
212 212 if not opts.get('dry_run'):
213 213 if not after:
214 214 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
215 215
216 216 if opts.get('dry_run'):
217 217 return result
218 218
219 219 remove = [lfutil.standin(f) for f in remove]
220 220 # If this is being called by addremove, let the original addremove
221 221 # function handle this.
222 222 if not isaddremove:
223 223 for f in remove:
224 224 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
225 225 repo[None].forget(remove)
226 226
227 227 for f in remove:
228 228 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
229 229 False)
230 230
231 231 lfdirstate.write()
232 232 finally:
233 233 wlock.release()
234 234
235 235 return result
236 236
237 237 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 238 # appear at their right place in the manifests.
239 239 def decodepath(orig, path):
240 240 return lfutil.splitstandin(path) or path
241 241
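For illustration, a self-contained approximation of lfutil.splitstandin (assuming the '.hglf' standin directory): decodepath maps a standin back to its manifest path and leaves every other path untouched:

    def splitstandin(path, shortname='.hglf'):
        if path.startswith(shortname + '/'):
            return path[len(shortname) + 1:]
        return None

    def decodepath(path):
        return splitstandin(path) or path

    assert decodepath('.hglf/data/big.bin') == 'data/big.bin'
    assert decodepath('README') == 'README'
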
242 242 # -- Wrappers: modify existing commands --------------------------------
243 243
244 244 def overrideadd(orig, ui, repo, *pats, **opts):
245 245 if opts.get('normal') and opts.get('large'):
246 246 raise util.Abort(_('--normal cannot be used with --large'))
247 247 return orig(ui, repo, *pats, **opts)
248 248
249 249 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
250 250 # The --normal flag short circuits this override
251 251 if opts.get('normal'):
252 252 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
253 253
254 254 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
255 255 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
256 256 ladded)
257 257 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
258 258
259 259 bad.extend(f for f in lbad)
260 260 return bad
261 261
262 262 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
263 263 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
264 264 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
265 265 return removelargefiles(ui, repo, False, matcher, after=after,
266 266 force=force) or result
267 267
268 268 def overridestatusfn(orig, repo, rev2, **opts):
269 269 try:
270 270 repo._repo.lfstatus = True
271 271 return orig(repo, rev2, **opts)
272 272 finally:
273 273 repo._repo.lfstatus = False
274 274
275 275 def overridestatus(orig, ui, repo, *pats, **opts):
276 276 try:
277 277 repo.lfstatus = True
278 278 return orig(ui, repo, *pats, **opts)
279 279 finally:
280 280 repo.lfstatus = False
281 281
282 282 def overridedirty(orig, repo, ignoreupdate=False):
283 283 try:
284 284 repo._repo.lfstatus = True
285 285 return orig(repo, ignoreupdate)
286 286 finally:
287 287 repo._repo.lfstatus = False
288 288
289 289 def overridelog(orig, ui, repo, *pats, **opts):
290 290 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
291 291 default='relpath', badfn=None):
292 292 """Matcher that merges root directory with .hglf, suitable for log.
293 293 It is still possible to match .hglf directly.
294 294 For any listed files run log on the standin too.
295 295 matchfn tries both the given filename and with .hglf stripped.
296 296 """
297 297 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
298 298 badfn=badfn)
299 299 m, p = copy.copy(matchandpats)
300 300
301 301 if m.always():
302 302 # We want to match everything anyway, so there's no benefit trying
303 303 # to add standins.
304 304 return matchandpats
305 305
306 306 pats = set(p)
307 307
308 308 def fixpats(pat, tostandin=lfutil.standin):
309 309 if pat.startswith('set:'):
310 310 return pat
311 311
312 312 kindpat = match_._patsplit(pat, None)
313 313
314 314 if kindpat[0] is not None:
315 315 return kindpat[0] + ':' + tostandin(kindpat[1])
316 316 return tostandin(kindpat[1])
317 317
318 318 if m._cwd:
319 319 hglf = lfutil.shortname
320 320 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
321 321
322 322 def tostandin(f):
323 323 # The file may already be a standin, so truncate the back
324 324 # prefix and test before mangling it. This avoids turning
325 325 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
326 326 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
327 327 return f
328 328
329 329 # An absolute path is from outside the repo, so truncate the
330 330 # path to the root before building the standin. Otherwise cwd
331 331 # is somewhere in the repo, relative to root, and needs to be
332 332 # prepended before building the standin.
333 333 if os.path.isabs(m._cwd):
334 334 f = f[len(back):]
335 335 else:
336 336 f = m._cwd + '/' + f
337 337 return back + lfutil.standin(f)
338 338
339 339 pats.update(fixpats(f, tostandin) for f in p)
340 340 else:
341 341 def tostandin(f):
342 342 if lfutil.splitstandin(f):
343 343 return f
344 344 return lfutil.standin(f)
345 345 pats.update(fixpats(f, tostandin) for f in p)
346 346
347 347 for i in range(0, len(m._files)):
348 348 # Don't add '.hglf' to m.files, since that is already covered by '.'
349 349 if m._files[i] == '.':
350 350 continue
351 351 standin = lfutil.standin(m._files[i])
352 352 # If the "standin" is a directory, append instead of replace to
353 353 # support naming a directory on the command line with only
354 354 # largefiles. The original directory is kept to support normal
355 355 # files.
356 356 if standin in repo[ctx.node()]:
357 357 m._files[i] = standin
358 358 elif m._files[i] not in repo[ctx.node()] \
359 359 and repo.wvfs.isdir(standin):
360 360 m._files.append(standin)
361 361
362 362 m._fileroots = set(m._files)
363 363 m._always = False
364 364 origmatchfn = m.matchfn
365 365 def lfmatchfn(f):
366 366 lf = lfutil.splitstandin(f)
367 367 if lf is not None and origmatchfn(lf):
368 368 return True
369 369 r = origmatchfn(f)
370 370 return r
371 371 m.matchfn = lfmatchfn
372 372
373 373 ui.debug('updated patterns: %s\n' % sorted(pats))
374 374 return m, pats
375 375
376 376 # For hg log --patch, the match object is used in two different senses:
377 377 # (1) to determine what revisions should be printed out, and
378 378 # (2) to determine what files to print out diffs for.
379 379 # The magic matchandpats override should be used for case (1) but not for
380 380 # case (2).
381 381 def overridemakelogfilematcher(repo, pats, opts, badfn=None):
382 382 wctx = repo[None]
383 383 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
384 384 return lambda rev: match
385 385
386 386 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
387 387 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
388 388 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
389 389
390 390 try:
391 391 return orig(ui, repo, *pats, **opts)
392 392 finally:
393 393 restorematchandpatsfn()
394 394 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
395 395
396 396 def overrideverify(orig, ui, repo, *pats, **opts):
397 397 large = opts.pop('large', False)
398 398 all = opts.pop('lfa', False)
399 399 contents = opts.pop('lfc', False)
400 400
401 401 result = orig(ui, repo, *pats, **opts)
402 402 if large or all or contents:
403 403 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
404 404 return result
405 405
406 406 def overridedebugstate(orig, ui, repo, *pats, **opts):
407 407 large = opts.pop('large', False)
408 408 if large:
409 409 class fakerepo(object):
410 410 dirstate = lfutil.openlfdirstate(ui, repo)
411 411 orig(ui, fakerepo, *pats, **opts)
412 412 else:
413 413 orig(ui, repo, *pats, **opts)
414 414
415 415 # Before starting the manifest merge, merge.updates will call
416 416 # _checkunknownfile to check if there are any files in the merged-in
417 417 # changeset that collide with unknown files in the working copy.
418 418 #
419 419 # The largefiles are seen as unknown, so this prevents us from merging
420 420 # in a file 'foo' if we already have a largefile with the same name.
421 421 #
422 422 # The overridden function filters the unknown files by removing any
423 423 # largefiles. This makes the merge proceed and we can then handle this
424 424 # case further in the overridden calculateupdates function below.
425 425 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
426 426 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
427 427 return False
428 428 return origfn(repo, wctx, mctx, f, f2)
429 429
430 430 # The manifest merge handles conflicts on the manifest level. We want
431 431 # to handle changes in largefile-ness of files at this level too.
432 432 #
433 433 # The strategy is to run the original calculateupdates and then process
434 434 # the action list it outputs. There are two cases we need to deal with:
435 435 #
436 436 # 1. Normal file in p1, largefile in p2. Here the largefile is
437 437 # detected via its standin file, which will enter the working copy
438 438 # with a "get" action. It is not "merge" since the standin is all
439 439 # Mercurial is concerned with at this level -- the link to the
440 440 # existing normal file is not relevant here.
441 441 #
442 442 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
443 443 # since the largefile will be present in the working copy and
444 444 # different from the normal file in p2. Mercurial therefore
445 445 # triggers a merge action.
446 446 #
447 447 # In both cases, we prompt the user and emit new actions to either
448 448 # remove the standin (if the normal file was kept) or to remove the
449 449 # normal file and get the standin (if the largefile was kept). The
450 450 # default prompt answer is to use the largefile version since it was
451 451 # presumably changed on purpose.
452 452 #
453 453 # Finally, the merge.applyupdates function will then take care of
454 454 # writing the files into the working copy and lfcommands.updatelfiles
455 455 # will update the largefiles.
456 456 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
457 457 partial, acceptremote, followcopies):
458 458 overwrite = force and not branchmerge
459 459 actions, diverge, renamedelete = origfn(
460 460 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
461 461 followcopies)
462 462
463 463 if overwrite:
464 464 return actions, diverge, renamedelete
465 465
466 466 # Convert to dictionary with filename as key and action as value.
467 467 lfiles = set()
468 468 for f in actions:
469 469 splitstandin = f and lfutil.splitstandin(f)
470 470 if splitstandin in p1:
471 471 lfiles.add(splitstandin)
472 472 elif lfutil.standin(f) in p1:
473 473 lfiles.add(f)
474 474
475 475 for lfile in lfiles:
476 476 standin = lfutil.standin(lfile)
477 477 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
478 478 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
479 479 if sm in ('g', 'dc') and lm != 'r':
480 480 # Case 1: normal file in the working copy, largefile in
481 481 # the second parent
482 482 usermsg = _('remote turned local normal file %s into a largefile\n'
483 483 'use (l)argefile or keep (n)ormal file?'
484 484 '$$ &Largefile $$ &Normal file') % lfile
485 485 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
486 486 actions[lfile] = ('r', None, 'replaced by standin')
487 487 actions[standin] = ('g', sargs, 'replaces standin')
488 488 else: # keep local normal file
489 489 actions[lfile] = ('k', None, 'replaces standin')
490 490 if branchmerge:
491 491 actions[standin] = ('k', None, 'replaced by non-standin')
492 492 else:
493 493 actions[standin] = ('r', None, 'replaced by non-standin')
494 494 elif lm in ('g', 'dc') and sm != 'r':
495 495 # Case 2: largefile in the working copy, normal file in
496 496 # the second parent
497 497 usermsg = _('remote turned local largefile %s into a normal file\n'
498 498 'keep (l)argefile or use (n)ormal file?'
499 499 '$$ &Largefile $$ &Normal file') % lfile
500 500 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
501 501 if branchmerge:
502 502 # largefile can be restored from standin safely
503 503 actions[lfile] = ('k', None, 'replaced by standin')
504 504 actions[standin] = ('k', None, 'replaces standin')
505 505 else:
506 506 # "lfile" should be marked as "removed" without
507 507 # removal of itself
508 508 actions[lfile] = ('lfmr', None,
509 509 'forget non-standin largefile')
510 510
511 511 # linear-merge should treat this largefile as 're-added'
512 512 actions[standin] = ('a', None, 'keep standin')
513 513 else: # pick remote normal file
514 514 actions[lfile] = ('g', largs, 'replaces standin')
515 515 actions[standin] = ('r', None, 'replaced by non-standin')
516 516
517 517 return actions, diverge, renamedelete
518 518
519 519 def mergerecordupdates(orig, repo, actions, branchmerge):
520 520 if 'lfmr' in actions:
521 521 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
522 522 for lfile, args, msg in actions['lfmr']:
523 523 # this should be executed before 'orig', to execute 'remove'
524 524 # before all other actions
525 525 repo.dirstate.remove(lfile)
526 526 # make sure lfile doesn't get synclfdirstate'd as normal
527 527 lfdirstate.add(lfile)
528 528 lfdirstate.write()
529 529
530 530 return orig(repo, actions, branchmerge)
531 531
532 532
533 533 # Override filemerge to prompt the user about how they wish to merge
534 534 # largefiles. This will handle identical edits without prompting the user.
535 535 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
536 536 if not lfutil.isstandin(orig):
537 537 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
538 538
539 539 ahash = fca.data().strip().lower()
540 540 dhash = fcd.data().strip().lower()
541 541 ohash = fco.data().strip().lower()
542 542 if (ohash != ahash and
543 543 ohash != dhash and
544 544 (dhash == ahash or
545 545 repo.ui.promptchoice(
546 546 _('largefile %s has a merge conflict\nancestor was %s\n'
547 547 'keep (l)ocal %s or\ntake (o)ther %s?'
548 548 '$$ &Local $$ &Other') %
549 549 (lfutil.splitstandin(orig), ahash, dhash, ohash),
550 550 0) == 1)):
551 551 repo.wwrite(fcd.path(), fco.data(), fco.flags())
552 552 return 0
553 553
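A compact way to read that condition (toy hashes; takeother is a made-up helper, and ahash/dhash/ohash are the ancestor, local and other standin contents): the other side wins only when it actually changed, differs from local, and either local is unchanged or the user picks it at the prompt:

    def takeother(ahash, dhash, ohash, userpicksother=False):
        return (ohash != ahash and ohash != dhash and
                (dhash == ahash or userpicksother))

    assert not takeother('h1', 'h2', 'h1')   # other unchanged: keep local
    assert not takeother('h1', 'h2', 'h2')   # identical edits: keep local, no prompt
    assert takeother('h1', 'h1', 'h2')       # only other changed: take other silently
    assert takeother('h1', 'h2', 'h3', userpicksother=True)   # true conflict: prompt decides
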
554 554 def copiespathcopies(orig, ctx1, ctx2, match=None):
555 555 copies = orig(ctx1, ctx2, match=match)
556 556 updated = {}
557 557
558 558 for k, v in copies.iteritems():
559 559 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
560 560
561 561 return updated
562 562
563 563 # Copy first changes the matchers to match standins instead of
564 564 # largefiles. Then it overrides util.copyfile; in that function it
565 565 # checks if the destination largefile already exists. It also keeps a
566 566 # list of copied files so that the largefiles can be copied and the
567 567 # dirstate updated.
568 568 def overridecopy(orig, ui, repo, pats, opts, rename=False):
569 569 # doesn't remove largefile on rename
570 570 if len(pats) < 2:
571 571 # this isn't legal, let the original function deal with it
572 572 return orig(ui, repo, pats, opts, rename)
573 573
574 574 # This could copy both lfiles and normal files in one command,
575 575 # but we don't want to do that. First replace their matcher to
576 576 # only match normal files and run it, then replace it to just
577 577 # match largefiles and run it again.
578 578 nonormalfiles = False
579 579 nolfiles = False
580 580 installnormalfilesmatchfn(repo[None].manifest())
581 581 try:
582 582 result = orig(ui, repo, pats, opts, rename)
583 583 except util.Abort, e:
584 584 if str(e) != _('no files to copy'):
585 585 raise e
586 586 else:
587 587 nonormalfiles = True
588 588 result = 0
589 589 finally:
590 590 restorematchfn()
591 591
592 592 # The first rename can cause our current working directory to be removed.
593 593 # In that case there is nothing left to copy/rename so just quit.
594 594 try:
595 595 repo.getcwd()
596 596 except OSError:
597 597 return result
598 598
599 599 def makestandin(relpath):
600 600 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
601 601 return os.path.join(repo.wjoin(lfutil.standin(path)))
602 602
603 603 fullpats = scmutil.expandpats(pats)
604 604 dest = fullpats[-1]
605 605
606 606 if os.path.isdir(dest):
607 607 if not os.path.isdir(makestandin(dest)):
608 608 os.makedirs(makestandin(dest))
609 609
610 610 try:
611 611 # When we call orig below it creates the standins but we don't add
612 612 # them to the dir state until later so lock during that time.
613 613 wlock = repo.wlock()
614 614
615 615 manifest = repo[None].manifest()
616 616 def overridematch(ctx, pats=[], opts={}, globbed=False,
617 617 default='relpath', badfn=None):
618 618 newpats = []
619 619 # The patterns were previously mangled to add the standin
620 620 # directory; we need to remove that now
621 621 for pat in pats:
622 622 if match_.patkind(pat) is None and lfutil.shortname in pat:
623 623 newpats.append(pat.replace(lfutil.shortname, ''))
624 624 else:
625 625 newpats.append(pat)
626 626 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
627 627 m = copy.copy(match)
628 628 lfile = lambda f: lfutil.standin(f) in manifest
629 629 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
630 630 m._fileroots = set(m._files)
631 631 origmatchfn = m.matchfn
632 632 m.matchfn = lambda f: (lfutil.isstandin(f) and
633 633 (f in manifest) and
634 634 origmatchfn(lfutil.splitstandin(f)) or
635 635 None)
636 636 return m
637 637 oldmatch = installmatchfn(overridematch)
638 638 listpats = []
639 639 for pat in pats:
640 640 if match_.patkind(pat) is not None:
641 641 listpats.append(pat)
642 642 else:
643 643 listpats.append(makestandin(pat))
644 644
645 645 try:
646 646 origcopyfile = util.copyfile
647 647 copiedfiles = []
648 648 def overridecopyfile(src, dest):
649 649 if (lfutil.shortname in src and
650 650 dest.startswith(repo.wjoin(lfutil.shortname))):
651 651 destlfile = dest.replace(lfutil.shortname, '')
652 652 if not opts['force'] and os.path.exists(destlfile):
653 653 raise IOError('',
654 654 _('destination largefile already exists'))
655 655 copiedfiles.append((src, dest))
656 656 origcopyfile(src, dest)
657 657
658 658 util.copyfile = overridecopyfile
659 659 result += orig(ui, repo, listpats, opts, rename)
660 660 finally:
661 661 util.copyfile = origcopyfile
662 662
663 663 lfdirstate = lfutil.openlfdirstate(ui, repo)
664 664 for (src, dest) in copiedfiles:
665 665 if (lfutil.shortname in src and
666 666 dest.startswith(repo.wjoin(lfutil.shortname))):
667 667 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
668 668 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
669 669 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
670 670 if not os.path.isdir(destlfiledir):
671 671 os.makedirs(destlfiledir)
672 672 if rename:
673 673 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
674 674
675 675 # The file is gone, but this deletes any empty parent
676 676 # directories as a side-effect.
677 677 util.unlinkpath(repo.wjoin(srclfile), True)
678 678 lfdirstate.remove(srclfile)
679 679 else:
680 680 util.copyfile(repo.wjoin(srclfile),
681 681 repo.wjoin(destlfile))
682 682
683 683 lfdirstate.add(destlfile)
684 684 lfdirstate.write()
685 685 except util.Abort, e:
686 686 if str(e) != _('no files to copy'):
687 687 raise e
688 688 else:
689 689 nolfiles = True
690 690 finally:
691 691 restorematchfn()
692 692 wlock.release()
693 693
694 694 if nolfiles and nonormalfiles:
695 695 raise util.Abort(_('no files to copy'))
696 696
697 697 return result
698 698
699 699 # When the user calls revert, we have to be careful to not revert any
700 700 # changes to other largefiles accidentally. This means we have to keep
701 701 # track of the largefiles that are being reverted so we only pull down
702 702 # the necessary largefiles.
703 703 #
704 704 # Standins are only updated (to match the hash of largefiles) before
705 705 # commits. Update the standins then run the original revert, changing
706 706 # the matcher to hit standins instead of largefiles. Based on the
707 707 # resulting standins update the largefiles.
708 708 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
709 709 # Because we put the standins in a bad state (by updating them)
710 710 # and then return them to a correct state we need to lock to
711 711 # prevent others from changing them in their incorrect state.
712 712 wlock = repo.wlock()
713 713 try:
714 714 lfdirstate = lfutil.openlfdirstate(ui, repo)
715 715 s = lfutil.lfdirstatestatus(lfdirstate, repo)
716 716 lfdirstate.write()
717 717 for lfile in s.modified:
718 718 lfutil.updatestandin(repo, lfutil.standin(lfile))
719 719 for lfile in s.deleted:
720 720 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
721 721 os.unlink(repo.wjoin(lfutil.standin(lfile)))
722 722
723 723 oldstandins = lfutil.getstandinsstate(repo)
724 724
725 725 def overridematch(mctx, pats=[], opts={}, globbed=False,
726 726 default='relpath', badfn=None):
727 727 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
728 728 m = copy.copy(match)
729 729
730 730 # revert supports recursing into subrepos, and though largefiles
731 731 # currently doesn't work correctly in that case, this match is
732 732 # called, so the lfdirstate above may not be the correct one for
733 733 # this invocation of match.
734 734 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
735 735 False)
736 736
737 737 def tostandin(f):
738 738 standin = lfutil.standin(f)
739 739 if standin in ctx or standin in mctx:
740 740 return standin
741 741 elif standin in repo[None] or lfdirstate[f] == 'r':
742 742 return None
743 743 return f
744 744 m._files = [tostandin(f) for f in m._files]
745 745 m._files = [f for f in m._files if f is not None]
746 746 m._fileroots = set(m._files)
747 747 origmatchfn = m.matchfn
748 748 def matchfn(f):
749 749 if lfutil.isstandin(f):
750 750 return (origmatchfn(lfutil.splitstandin(f)) and
751 751 (f in ctx or f in mctx))
752 752 return origmatchfn(f)
753 753 m.matchfn = matchfn
754 754 return m
755 755 oldmatch = installmatchfn(overridematch)
756 756 try:
757 757 orig(ui, repo, ctx, parents, *pats, **opts)
758 758 finally:
759 759 restorematchfn()
760 760
761 761 newstandins = lfutil.getstandinsstate(repo)
762 762 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
763 763 # lfdirstate should be 'normallookup'-ed for updated files,
764 764 # because reverting doesn't touch dirstate for 'normal' files
765 765 # when target revision is explicitly specified: in such case,
766 766 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
767 767 # of target (standin) file.
768 768 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
769 769 normallookup=True)
770 770
771 771 finally:
772 772 wlock.release()
773 773
774 774 # after pulling changesets, we need to take some extra care to get
775 775 # largefiles updated remotely
776 776 def overridepull(orig, ui, repo, source=None, **opts):
777 777 revsprepull = len(repo)
778 778 if not source:
779 779 source = 'default'
780 780 repo.lfpullsource = source
781 781 result = orig(ui, repo, source, **opts)
782 782 revspostpull = len(repo)
783 783 lfrevs = opts.get('lfrev', [])
784 784 if opts.get('all_largefiles'):
785 785 lfrevs.append('pulled()')
786 786 if lfrevs and revspostpull > revsprepull:
787 787 numcached = 0
788 788 repo.firstpulled = revsprepull # for pulled() revset expression
789 789 try:
790 790 for rev in scmutil.revrange(repo, lfrevs):
791 791 ui.note(_('pulling largefiles for revision %s\n') % rev)
792 792 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
793 793 numcached += len(cached)
794 794 finally:
795 795 del repo.firstpulled
796 796 ui.status(_("%d largefiles cached\n") % numcached)
797 797 return result
798 798
799 799 def pulledrevsetsymbol(repo, subset, x):
800 800 """``pulled()``
801 801 Changesets that have just been pulled.
802 802
803 803 Only available with largefiles from pull --lfrev expressions.
804 804
805 805 .. container:: verbose
806 806
807 807 Some examples:
808 808
809 809 - pull largefiles for all new changesets::
810 810
811 811 hg pull --lfrev "pulled()"
812 812
813 813 - pull largefiles for all new branch heads::
814 814
815 815 hg pull --lfrev "head(pulled()) and not closed()"
816 816
817 817 """
818 818
819 819 try:
820 820 firstpulled = repo.firstpulled
821 821 except AttributeError:
822 822 raise util.Abort(_("pulled() only available in --lfrev"))
823 823 return revset.baseset([r for r in subset if r >= firstpulled])
824 824
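Reduced to plain integers, the filter at the end of pulledrevsetsymbol() keeps exactly the revisions numbered at or above the repository length recorded before the pull (numbers below are invented):

    firstpulled = 5                 # revsprepull recorded by overridepull
    subset = list(range(8))         # all candidate revision numbers
    assert [r for r in subset if r >= firstpulled] == [5, 6, 7]
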
825 825 def overrideclone(orig, ui, source, dest=None, **opts):
826 826 d = dest
827 827 if d is None:
828 828 d = hg.defaultdest(source)
829 829 if opts.get('all_largefiles') and not hg.islocal(d):
830 830 raise util.Abort(_(
831 831 '--all-largefiles is incompatible with non-local destination %s') %
832 832 d)
833 833
834 834 return orig(ui, source, dest, **opts)
835 835
836 836 def hgclone(orig, ui, opts, *args, **kwargs):
837 837 result = orig(ui, opts, *args, **kwargs)
838 838
839 839 if result is not None:
840 840 sourcerepo, destrepo = result
841 841 repo = destrepo.local()
842 842
843 843 # When cloning to a remote repo (like through SSH), no repo is available
844 844 # from the peer. Therefore the largefiles can't be downloaded and the
845 845 # hgrc can't be updated.
846 846 if not repo:
847 847 return result
848 848
849 849 # If largefiles is required for this repo, permanently enable it locally
850 850 if 'largefiles' in repo.requirements:
851 851 fp = repo.vfs('hgrc', 'a', text=True)
852 852 try:
853 853 fp.write('\n[extensions]\nlargefiles=\n')
854 854 finally:
855 855 fp.close()
856 856
857 857 # Caching is implicitly limited to 'rev' option, since the dest repo was
858 858 # truncated at that point. The user may expect a download count with
859 859 # this option, so attempt whether or not this is a largefile repo.
860 860 if opts.get('all_largefiles'):
861 861 success, missing = lfcommands.downloadlfiles(ui, repo, None)
862 862
863 863 if missing != 0:
864 864 return None
865 865
866 866 return result
867 867
868 868 def overriderebase(orig, ui, repo, **opts):
869 869 if not util.safehasattr(repo, '_largefilesenabled'):
870 870 return orig(ui, repo, **opts)
871 871
872 872 resuming = opts.get('continue')
873 873 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
874 874 repo._lfstatuswriters.append(lambda *msg, **opts: None)
875 875 try:
876 876 return orig(ui, repo, **opts)
877 877 finally:
878 878 repo._lfstatuswriters.pop()
879 879 repo._lfcommithooks.pop()
880 880
881 881 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
882 882 prefix='', mtime=None, subrepos=None):
883 883 # No need to lock because we are only reading history and
884 884 # largefile caches, neither of which are modified.
885 lfcommands.cachelfiles(repo.ui, repo, node)
885 if node is not None:
886 lfcommands.cachelfiles(repo.ui, repo, node)
886 887
887 888 if kind not in archival.archivers:
888 889 raise util.Abort(_("unknown archive type '%s'") % kind)
889 890
890 891 ctx = repo[node]
891 892
892 893 if kind == 'files':
893 894 if prefix:
894 895 raise util.Abort(
895 896 _('cannot give prefix when archiving to files'))
896 897 else:
897 898 prefix = archival.tidyprefix(dest, kind, prefix)
898 899
899 900 def write(name, mode, islink, getdata):
900 901 if matchfn and not matchfn(name):
901 902 return
902 903 data = getdata()
903 904 if decode:
904 905 data = repo.wwritedata(name, data)
905 906 archiver.addfile(prefix + name, mode, islink, data)
906 907
907 908 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
908 909
909 910 if repo.ui.configbool("ui", "archivemeta", True):
910 911 write('.hg_archival.txt', 0644, False,
911 912 lambda: archival.buildmetadata(ctx))
912 913
913 914 for f in ctx:
914 915 ff = ctx.flags(f)
915 916 getdata = ctx[f].data
916 917 if lfutil.isstandin(f):
917 path = lfutil.findfile(repo, getdata().strip())
918 if path is None:
919 raise util.Abort(
920 _('largefile %s not found in repo store or system cache')
921 % lfutil.splitstandin(f))
918 if node is not None:
919 path = lfutil.findfile(repo, getdata().strip())
920
921 if path is None:
922 raise util.Abort(
923 _('largefile %s not found in repo store or system cache')
924 % lfutil.splitstandin(f))
925 else:
926 path = lfutil.splitstandin(f)
927
922 928 f = lfutil.splitstandin(f)
923 929
924 930 def getdatafn():
925 931 fd = None
926 932 try:
927 933 fd = open(path, 'rb')
928 934 return fd.read()
929 935 finally:
930 936 if fd:
931 937 fd.close()
932 938
933 939 getdata = getdatafn
934 940 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
935 941
936 942 if subrepos:
937 943 for subpath in sorted(ctx.substate):
938 sub = ctx.sub(subpath)
944 sub = ctx.workingsub(subpath)
939 945 submatch = match_.narrowmatcher(subpath, matchfn)
940 946 sub.archive(archiver, prefix, submatch)
941 947
942 948 archiver.done()
943 949
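The essence of the wdir() handling added above, as a hedged standalone sketch (largefilepath is a made-up helper; findfile and splitstandin mimic the lfutil functions of the same names): with a real node the largefile body is looked up by hash in the store or user cache, while node=None, the working-directory pseudo-revision, reads the checked-out file directly:

    def largefilepath(node, standinname, standinhash, findfile, splitstandin):
        if node is not None:
            path = findfile(standinhash)          # repo store or system cache
            if path is None:
                raise LookupError('largefile %s not found'
                                  % splitstandin(standinname))
            return path
        return splitstandin(standinname)          # wdir(): use the file in place

    assert largefilepath(None, '.hglf/big.bin', 'deadbeef',
                         lambda h: None,
                         lambda s: s[len('.hglf/'):]) == 'big.bin'
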
944 950 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
945 951 repo._get(repo._state + ('hg',))
946 952 rev = repo._state[1]
947 953 ctx = repo._repo[rev]
948 954
949 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
955 if ctx.node() is not None:
956 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
950 957
951 958 def write(name, mode, islink, getdata):
952 959 # At this point, the standin has been replaced with the largefile name,
953 960 # so the normal matcher works here without the lfutil variants.
954 961 if match and not match(f):
955 962 return
956 963 data = getdata()
957 964
958 965 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
959 966
960 967 for f in ctx:
961 968 ff = ctx.flags(f)
962 969 getdata = ctx[f].data
963 970 if lfutil.isstandin(f):
964 path = lfutil.findfile(repo._repo, getdata().strip())
965 if path is None:
966 raise util.Abort(
967 _('largefile %s not found in repo store or system cache')
968 % lfutil.splitstandin(f))
971 if ctx.node() is not None:
972 path = lfutil.findfile(repo._repo, getdata().strip())
973
974 if path is None:
975 raise util.Abort(
976 _('largefile %s not found in repo store or system cache')
977 % lfutil.splitstandin(f))
978 else:
979 path = lfutil.splitstandin(f)
980
969 981 f = lfutil.splitstandin(f)
970 982
971 983 def getdatafn():
972 984 fd = None
973 985 try:
974 986 fd = open(os.path.join(prefix, path), 'rb')
975 987 return fd.read()
976 988 finally:
977 989 if fd:
978 990 fd.close()
979 991
980 992 getdata = getdatafn
981 993
982 994 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
983 995
984 996 for subpath in sorted(ctx.substate):
985 sub = ctx.sub(subpath)
997 sub = ctx.workingsub(subpath)
986 998 submatch = match_.narrowmatcher(subpath, match)
987 999 sub.archive(archiver, prefix + repo._path + '/', submatch)
988 1000
989 1001 # If a largefile is modified, the change is not reflected in its
990 1002 # standin until a commit. cmdutil.bailifchanged() raises an exception
991 1003 # if the repo has uncommitted changes. Wrap it to also check if
992 1004 # largefiles were changed. This is used by bisect, backout and fetch.
993 1005 def overridebailifchanged(orig, repo, *args, **kwargs):
994 1006 orig(repo, *args, **kwargs)
995 1007 repo.lfstatus = True
996 1008 s = repo.status()
997 1009 repo.lfstatus = False
998 1010 if s.modified or s.added or s.removed or s.deleted:
999 1011 raise util.Abort(_('uncommitted changes'))
1000 1012
1001 1013 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1002 1014 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1003 1015 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1004 1016 m = composelargefilematcher(match, repo[None].manifest())
1005 1017
1006 1018 try:
1007 1019 repo.lfstatus = True
1008 1020 s = repo.status(match=m, clean=True)
1009 1021 finally:
1010 1022 repo.lfstatus = False
1011 1023 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1012 1024 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1013 1025
1014 1026 for f in forget:
1015 1027 if lfutil.standin(f) not in repo.dirstate and not \
1016 1028 repo.wvfs.isdir(lfutil.standin(f)):
1017 1029 ui.warn(_('not removing %s: file is already untracked\n')
1018 1030 % m.rel(f))
1019 1031 bad.append(f)
1020 1032
1021 1033 for f in forget:
1022 1034 if ui.verbose or not m.exact(f):
1023 1035 ui.status(_('removing %s\n') % m.rel(f))
1024 1036
1025 1037 # Need to lock because standin files are deleted then removed from the
1026 1038 # repository and we could race in-between.
1027 1039 wlock = repo.wlock()
1028 1040 try:
1029 1041 lfdirstate = lfutil.openlfdirstate(ui, repo)
1030 1042 for f in forget:
1031 1043 if lfdirstate[f] == 'a':
1032 1044 lfdirstate.drop(f)
1033 1045 else:
1034 1046 lfdirstate.remove(f)
1035 1047 lfdirstate.write()
1036 1048 standins = [lfutil.standin(f) for f in forget]
1037 1049 for f in standins:
1038 1050 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1039 1051 rejected = repo[None].forget(standins)
1040 1052 finally:
1041 1053 wlock.release()
1042 1054
1043 1055 bad.extend(f for f in rejected if f in m.files())
1044 1056 forgot.extend(f for f in forget if f not in rejected)
1045 1057 return bad, forgot
1046 1058
1047 1059 def _getoutgoings(repo, other, missing, addfunc):
1048 1060 """get pairs of filename and largefile hash in outgoing revisions
1049 1061 in 'missing'.
1050 1062
1051 1063 largefiles already existing on 'other' repository are ignored.
1052 1064
1053 1065 'addfunc' is invoked with each unique pairs of filename and
1054 1066 largefile hash value.
1055 1067 """
1056 1068 knowns = set()
1057 1069 lfhashes = set()
1058 1070 def dedup(fn, lfhash):
1059 1071 k = (fn, lfhash)
1060 1072 if k not in knowns:
1061 1073 knowns.add(k)
1062 1074 lfhashes.add(lfhash)
1063 1075 lfutil.getlfilestoupload(repo, missing, dedup)
1064 1076 if lfhashes:
1065 1077 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1066 1078 for fn, lfhash in knowns:
1067 1079 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1068 1080 addfunc(fn, lfhash)
1069 1081
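A simplified, hypothetical rehearsal of that flow (getoutgoings below is not the real function): duplicate (filename, hash) pairs collapse to one, and addfunc only fires for hashes the other repository does not already have:

    def getoutgoings(pairs, remotehashes, addfunc):
        knowns, lfhashes = set(), set()
        for fn, lfhash in pairs:                  # pairs found in outgoing csets
            k = (fn, lfhash)
            if k not in knowns:
                knowns.add(k)
                lfhashes.add(lfhash)
        for fn, lfhash in sorted(knowns):
            if lfhash not in remotehashes:        # stands in for basestore exists()
                addfunc(fn, lfhash)

    out = []
    getoutgoings([('a.bin', 'h1'), ('a.bin', 'h1'), ('b.bin', 'h2')],
                 {'h2'}, lambda fn, h: out.append((fn, h)))
    assert out == [('a.bin', 'h1')]
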
1070 1082 def outgoinghook(ui, repo, other, opts, missing):
1071 1083 if opts.pop('large', None):
1072 1084 lfhashes = set()
1073 1085 if ui.debugflag:
1074 1086 toupload = {}
1075 1087 def addfunc(fn, lfhash):
1076 1088 if fn not in toupload:
1077 1089 toupload[fn] = []
1078 1090 toupload[fn].append(lfhash)
1079 1091 lfhashes.add(lfhash)
1080 1092 def showhashes(fn):
1081 1093 for lfhash in sorted(toupload[fn]):
1082 1094 ui.debug(' %s\n' % (lfhash))
1083 1095 else:
1084 1096 toupload = set()
1085 1097 def addfunc(fn, lfhash):
1086 1098 toupload.add(fn)
1087 1099 lfhashes.add(lfhash)
1088 1100 def showhashes(fn):
1089 1101 pass
1090 1102 _getoutgoings(repo, other, missing, addfunc)
1091 1103
1092 1104 if not toupload:
1093 1105 ui.status(_('largefiles: no files to upload\n'))
1094 1106 else:
1095 1107 ui.status(_('largefiles to upload (%d entities):\n')
1096 1108 % (len(lfhashes)))
1097 1109 for file in sorted(toupload):
1098 1110 ui.status(lfutil.splitstandin(file) + '\n')
1099 1111 showhashes(file)
1100 1112 ui.status('\n')
1101 1113
1102 1114 def summaryremotehook(ui, repo, opts, changes):
1103 1115 largeopt = opts.get('large', False)
1104 1116 if changes is None:
1105 1117 if largeopt:
1106 1118 return (False, True) # only outgoing check is needed
1107 1119 else:
1108 1120 return (False, False)
1109 1121 elif largeopt:
1110 1122 url, branch, peer, outgoing = changes[1]
1111 1123 if peer is None:
1112 1124 # i18n: column positioning for "hg summary"
1113 1125 ui.status(_('largefiles: (no remote repo)\n'))
1114 1126 return
1115 1127
1116 1128 toupload = set()
1117 1129 lfhashes = set()
1118 1130 def addfunc(fn, lfhash):
1119 1131 toupload.add(fn)
1120 1132 lfhashes.add(lfhash)
1121 1133 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1122 1134
1123 1135 if not toupload:
1124 1136 # i18n: column positioning for "hg summary"
1125 1137 ui.status(_('largefiles: (no files to upload)\n'))
1126 1138 else:
1127 1139 # i18n: column positioning for "hg summary"
1128 1140 ui.status(_('largefiles: %d entities for %d files to upload\n')
1129 1141 % (len(lfhashes), len(toupload)))
1130 1142
1131 1143 def overridesummary(orig, ui, repo, *pats, **opts):
1132 1144 try:
1133 1145 repo.lfstatus = True
1134 1146 orig(ui, repo, *pats, **opts)
1135 1147 finally:
1136 1148 repo.lfstatus = False
1137 1149
1138 1150 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1139 1151 similarity=None):
1140 1152 if not lfutil.islfilesrepo(repo):
1141 1153 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1142 1154 # Get the list of missing largefiles so we can remove them
1143 1155 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1144 1156 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1145 1157 False, False, False)
1146 1158
1147 1159 # Call into the normal remove code, but let the original addremove handle
1148 1160 # removing the standin. Monkey patching here makes sure
1149 1161 # we don't remove the standin in the largefiles code, preventing a very
1150 1162 # confused state later.
1151 1163 if s.deleted:
1152 1164 m = copy.copy(matcher)
1153 1165
1154 1166 # The m._files and m._map attributes are not changed to the deleted list
1155 1167 # because that affects the m.exact() test, which in turn governs whether
1156 1168 # or not the file name is printed, and how. Simply limit the original
1157 1169 # matches to those in the deleted status list.
1158 1170 matchfn = m.matchfn
1159 1171 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1160 1172
1161 1173 removelargefiles(repo.ui, repo, True, m, **opts)
1162 1174 # Call into the normal add code, and any files that *should* be added as
1163 1175 # largefiles will be
1164 1176 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1165 1177 # Now that we've handled largefiles, hand off to the original addremove
1166 1178 # function to take care of the rest. Make sure it doesn't do anything with
1167 1179 # largefiles by passing a matcher that will ignore them.
1168 1180 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1169 1181 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1170 1182
1171 1183 # Calling purge with --all will cause the largefiles to be deleted.
1172 1184 # Override repo.status to prevent this from happening.
1173 1185 def overridepurge(orig, ui, repo, *dirs, **opts):
1174 1186 # XXX Monkey patching a repoview will not work. The assigned attribute will
1175 1187 # be set on the unfiltered repo, but we will only lookup attributes in the
1176 1188 # unfiltered repo if the lookup in the repoview object itself fails. As the
1177 1189 # monkey patched method exists on the repoview class the lookup will not
1178 1190 # fail. As a result, the original version will shadow the monkey patched
1179 1191 # one, defeating the monkey patch.
1180 1192 #
1181 1193 # As a work around we use an unfiltered repo here. We should do something
1182 1194 # cleaner instead.
1183 1195 repo = repo.unfiltered()
1184 1196 oldstatus = repo.status
1185 1197 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1186 1198 clean=False, unknown=False, listsubrepos=False):
1187 1199 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1188 1200 listsubrepos)
1189 1201 lfdirstate = lfutil.openlfdirstate(ui, repo)
1190 1202 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1191 1203 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1192 1204 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1193 1205 unknown, ignored, r.clean)
1194 1206 repo.status = overridestatus
1195 1207 orig(ui, repo, *dirs, **opts)
1196 1208 repo.status = oldstatus
1197 1209 def overriderollback(orig, ui, repo, **opts):
1198 1210 wlock = repo.wlock()
1199 1211 try:
1200 1212 before = repo.dirstate.parents()
1201 1213 orphans = set(f for f in repo.dirstate
1202 1214 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1203 1215 result = orig(ui, repo, **opts)
1204 1216 after = repo.dirstate.parents()
1205 1217 if before == after:
1206 1218 return result # no need to restore standins
1207 1219
1208 1220 pctx = repo['.']
1209 1221 for f in repo.dirstate:
1210 1222 if lfutil.isstandin(f):
1211 1223 orphans.discard(f)
1212 1224 if repo.dirstate[f] == 'r':
1213 1225 repo.wvfs.unlinkpath(f, ignoremissing=True)
1214 1226 elif f in pctx:
1215 1227 fctx = pctx[f]
1216 1228 repo.wwrite(f, fctx.data(), fctx.flags())
1217 1229 else:
1218 1230 # content of standin is not so important in 'a',
1219 1231 # 'm' or 'n' (coming from the 2nd parent) cases
1220 1232 lfutil.writestandin(repo, f, '', False)
1221 1233 for standin in orphans:
1222 1234 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1223 1235
1224 1236 lfdirstate = lfutil.openlfdirstate(ui, repo)
1225 1237 orphans = set(lfdirstate)
1226 1238 lfiles = lfutil.listlfiles(repo)
1227 1239 for file in lfiles:
1228 1240 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1229 1241 orphans.discard(file)
1230 1242 for lfile in orphans:
1231 1243 lfdirstate.drop(lfile)
1232 1244 lfdirstate.write()
1233 1245 finally:
1234 1246 wlock.release()
1235 1247 return result
1236 1248
1237 1249 def overridetransplant(orig, ui, repo, *revs, **opts):
1238 1250 resuming = opts.get('continue')
1239 1251 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1240 1252 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1241 1253 try:
1242 1254 result = orig(ui, repo, *revs, **opts)
1243 1255 finally:
1244 1256 repo._lfstatuswriters.pop()
1245 1257 repo._lfcommithooks.pop()
1246 1258 return result
1247 1259
1248 1260 def overridecat(orig, ui, repo, file1, *pats, **opts):
1249 1261 ctx = scmutil.revsingle(repo, opts.get('rev'))
1250 1262 err = 1
1251 1263 notbad = set()
1252 1264 m = scmutil.match(ctx, (file1,) + pats, opts)
1253 1265 origmatchfn = m.matchfn
1254 1266 def lfmatchfn(f):
1255 1267 if origmatchfn(f):
1256 1268 return True
1257 1269 lf = lfutil.splitstandin(f)
1258 1270 if lf is None:
1259 1271 return False
1260 1272 notbad.add(lf)
1261 1273 return origmatchfn(lf)
1262 1274 m.matchfn = lfmatchfn
1263 1275 origbadfn = m.bad
1264 1276 def lfbadfn(f, msg):
1265 1277 if not f in notbad:
1266 1278 origbadfn(f, msg)
1267 1279 m.bad = lfbadfn
1268 1280
1269 1281 origvisitdirfn = m.visitdir
1270 1282 def lfvisitdirfn(dir):
1271 1283 if dir == lfutil.shortname:
1272 1284 return True
1273 1285 ret = origvisitdirfn(dir)
1274 1286 if ret:
1275 1287 return ret
1276 1288 lf = lfutil.splitstandin(dir)
1277 1289 if lf is None:
1278 1290 return False
1279 1291 return origvisitdirfn(lf)
1280 1292 m.visitdir = lfvisitdirfn
1281 1293
1282 1294 for f in ctx.walk(m):
1283 1295 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1284 1296 pathname=f)
1285 1297 lf = lfutil.splitstandin(f)
1286 1298 if lf is None or origmatchfn(f):
1287 1299 # duplicating unreachable code from commands.cat
1288 1300 data = ctx[f].data()
1289 1301 if opts.get('decode'):
1290 1302 data = repo.wwritedata(f, data)
1291 1303 fp.write(data)
1292 1304 else:
1293 1305 hash = lfutil.readstandin(repo, lf, ctx.rev())
1294 1306 if not lfutil.inusercache(repo.ui, hash):
1295 1307 store = basestore._openstore(repo)
1296 1308 success, missing = store.get([(lf, hash)])
1297 1309 if len(success) != 1:
1298 1310 raise util.Abort(
1299 1311 _('largefile %s is not in cache and could not be '
1300 1312 'downloaded') % lf)
1301 1313 path = lfutil.usercachepath(repo.ui, hash)
1302 1314 fpin = open(path, "rb")
1303 1315 for chunk in util.filechunkiter(fpin, 128 * 1024):
1304 1316 fp.write(chunk)
1305 1317 fpin.close()
1306 1318 fp.close()
1307 1319 err = 0
1308 1320 return err
1309 1321
1310 1322 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1311 1323 *args, **kwargs):
1312 1324 wlock = repo.wlock()
1313 1325 try:
1314 1326 # branch | | |
1315 1327 # merge | force | partial | action
1316 1328 # -------+-------+---------+--------------
1317 1329 # x | x | x | linear-merge
1318 1330 # o | x | x | branch-merge
1319 1331 # x | o | x | overwrite (as clean update)
1320 1332 # o | o | x | force-branch-merge (*1)
1321 1333 # x | x | o | (*)
1322 1334 # o | x | o | (*)
1323 1335 # x | o | o | overwrite (as revert)
1324 1336 # o | o | o | (*)
1325 1337 #
1326 1338 # (*) don't care
1327 1339 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1328 1340
1329 1341 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1330 1342 unsure, s = lfdirstate.status(match_.always(repo.root,
1331 1343 repo.getcwd()),
1332 1344 [], False, False, False)
1333 1345 pctx = repo['.']
1334 1346 for lfile in unsure + s.modified:
1335 1347 lfileabs = repo.wvfs.join(lfile)
1336 1348 if not os.path.exists(lfileabs):
1337 1349 continue
1338 1350 lfhash = lfutil.hashrepofile(repo, lfile)
1339 1351 standin = lfutil.standin(lfile)
1340 1352 lfutil.writestandin(repo, standin, lfhash,
1341 1353 lfutil.getexecutable(lfileabs))
1342 1354 if (standin in pctx and
1343 1355 lfhash == lfutil.readstandin(repo, lfile, '.')):
1344 1356 lfdirstate.normal(lfile)
1345 1357 for lfile in s.added:
1346 1358 lfutil.updatestandin(repo, lfutil.standin(lfile))
1347 1359 lfdirstate.write()
1348 1360
1349 1361 oldstandins = lfutil.getstandinsstate(repo)
1350 1362
1351 1363 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1352 1364
1353 1365 newstandins = lfutil.getstandinsstate(repo)
1354 1366 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1355 1367 if branchmerge or force or partial:
1356 1368 filelist.extend(s.deleted + s.removed)
1357 1369
1358 1370 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1359 1371 normallookup=partial)
1360 1372
1361 1373 return result
1362 1374 finally:
1363 1375 wlock.release()
1364 1376
1365 1377 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1366 1378 result = orig(repo, files, *args, **kwargs)
1367 1379
1368 1380 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1369 1381 if filelist:
1370 1382 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1371 1383 printmessage=False, normallookup=True)
1372 1384
1373 1385 return result
@@ -1,326 +1,326 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import match as matchmod
10 10 import cmdutil
11 11 import scmutil, util, encoding
12 12 import cStringIO, os, tarfile, time, zipfile
13 13 import zlib, gzip
14 14 import struct
15 15 import error
16 16
17 17 # from unzip source code:
18 18 _UNX_IFREG = 0x8000
19 19 _UNX_IFLNK = 0xa000
20 20
21 21 def tidyprefix(dest, kind, prefix):
22 22 '''choose prefix to use for names in archive. make sure prefix is
23 23 safe for consumers.'''
24 24
25 25 if prefix:
26 26 prefix = util.normpath(prefix)
27 27 else:
28 28 if not isinstance(dest, str):
29 29 raise ValueError('dest must be string if no prefix')
30 30 prefix = os.path.basename(dest)
31 31 lower = prefix.lower()
32 32 for sfx in exts.get(kind, []):
33 33 if lower.endswith(sfx):
34 34 prefix = prefix[:-len(sfx)]
35 35 break
36 36 lpfx = os.path.normpath(util.localpath(prefix))
37 37 prefix = util.pconvert(lpfx)
38 38 if not prefix.endswith('/'):
39 39 prefix += '/'
40 40 # Drop the leading '.' path component if present, so Windows can read the
41 41 # zip files (issue4634)
42 42 if prefix.startswith('./'):
43 43 prefix = prefix[2:]
44 44 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
45 45 raise util.Abort(_('archive prefix contains illegal components'))
46 46 return prefix
47 47
48 48 exts = {
49 49 'tar': ['.tar'],
50 50 'tbz2': ['.tbz2', '.tar.bz2'],
51 51 'tgz': ['.tgz', '.tar.gz'],
52 52 'zip': ['.zip'],
53 53 }
54 54
55 55 def guesskind(dest):
56 56 for kind, extensions in exts.iteritems():
57 57 if any(dest.endswith(ext) for ext in extensions):
58 58 return kind
59 59 return None
60 60
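As a quick, standalone rehearsal of the extension table above (file names invented):

    exts = {
        'tar': ['.tar'],
        'tbz2': ['.tbz2', '.tar.bz2'],
        'tgz': ['.tgz', '.tar.gz'],
        'zip': ['.zip'],
    }

    def guesskind(dest):
        for kind, extensions in exts.items():
            if any(dest.endswith(ext) for ext in extensions):
                return kind
        return None

    assert guesskind('project-1.0.tar.gz') == 'tgz'
    assert guesskind('project.zip') == 'zip'
    assert guesskind('project.cpio') is None      # unknown archive type
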
61 61 def _rootctx(repo):
62 62 # repo[0] may be hidden
63 63 for rev in repo:
64 64 return repo[rev]
65 65 return repo['null']
66 66
67 67 def buildmetadata(ctx):
68 68 '''build content of .hg_archival.txt'''
69 69 repo = ctx.repo()
70 70 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
71 71 _rootctx(repo).hex(), ctx.hex(), encoding.fromlocal(ctx.branch()))
72 72
73 73 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
74 74 if repo.tagtype(t) == 'global')
75 75 if not tags:
76 76 repo.ui.pushbuffer()
77 77 opts = {'template': '{latesttag}\n{latesttagdistance}',
78 78 'style': '', 'patch': None, 'git': None}
79 79 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
80 80 ltags, dist = repo.ui.popbuffer().split('\n')
81 81 ltags = ltags.split(':')
82 82 changessince = len(repo.revs('only(.,%s)', ltags[0]))
83 83 tags = ''.join('latesttag: %s\n' % t for t in ltags)
84 84 tags += 'latesttagdistance: %s\n' % dist
85 85 tags += 'changessincelatesttag: %s\n' % changessince
86 86
87 87 return base + tags
88 88
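For illustration, the text buildmetadata() produces for a changeset carrying a
global tag is shaped roughly like the sketch below (the hashes and tag name
are placeholders); when no global tag applies, the 'tag:' lines are replaced
by the 'latesttag:', 'latesttagdistance:' and 'changessincelatesttag:' lines
computed above.

    repo: <hex of the repository's root changeset>
    node: <hex of the archived changeset>
    branch: default
    tag: v1.0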
89 89 class tarit(object):
90 90 '''write archive to tar file or stream. can write uncompressed,
91 91 or compress with gzip or bzip2.'''
92 92
93 93 class GzipFileWithTime(gzip.GzipFile):
94 94
95 95 def __init__(self, *args, **kw):
96 96 timestamp = None
97 97 if 'timestamp' in kw:
98 98 timestamp = kw.pop('timestamp')
99 99 if timestamp is None:
100 100 self.timestamp = time.time()
101 101 else:
102 102 self.timestamp = timestamp
103 103 gzip.GzipFile.__init__(self, *args, **kw)
104 104
105 105 def _write_gzip_header(self):
106 106 self.fileobj.write('\037\213') # magic header
107 107 self.fileobj.write('\010') # compression method
108 108 # Python 2.6 introduced self.name and deprecated self.filename
109 109 try:
110 110 fname = self.name
111 111 except AttributeError:
112 112 fname = self.filename
113 113 if fname and fname.endswith('.gz'):
114 114 fname = fname[:-3]
115 115 flags = 0
116 116 if fname:
117 117 flags = gzip.FNAME
118 118 self.fileobj.write(chr(flags))
119 119 gzip.write32u(self.fileobj, long(self.timestamp))
120 120 self.fileobj.write('\002')
121 121 self.fileobj.write('\377')
122 122 if fname:
123 123 self.fileobj.write(fname + '\000')
124 124
125 125 def __init__(self, dest, mtime, kind=''):
126 126 self.mtime = mtime
127 127 self.fileobj = None
128 128
129 129 def taropen(name, mode, fileobj=None):
130 130 if kind == 'gz':
131 131 mode = mode[0]
132 132 if not fileobj:
133 133 fileobj = open(name, mode + 'b')
134 134 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
135 135 zlib.Z_BEST_COMPRESSION,
136 136 fileobj, timestamp=mtime)
137 137 self.fileobj = gzfileobj
138 138 return tarfile.TarFile.taropen(name, mode, gzfileobj)
139 139 else:
140 140 return tarfile.open(name, mode + kind, fileobj)
141 141
142 142 if isinstance(dest, str):
143 143 self.z = taropen(dest, mode='w:')
144 144 else:
145 145 # Python 2.5-2.5.1 have a regression that requires a name arg
146 146 self.z = taropen(name='', mode='w|', fileobj=dest)
147 147
148 148 def addfile(self, name, mode, islink, data):
149 149 i = tarfile.TarInfo(name)
150 150 i.mtime = self.mtime
151 151 i.size = len(data)
152 152 if islink:
153 153 i.type = tarfile.SYMTYPE
154 154 i.mode = 0777
155 155 i.linkname = data
156 156 data = None
157 157 i.size = 0
158 158 else:
159 159 i.mode = mode
160 160 data = cStringIO.StringIO(data)
161 161 self.z.addfile(i, data)
162 162
163 163 def done(self):
164 164 self.z.close()
165 165 if self.fileobj:
166 166 self.fileobj.close()
167 167
168 168 class tellable(object):
169 169 '''provide tell method for zipfile.ZipFile when writing to http
170 170 response file object.'''
171 171
172 172 def __init__(self, fp):
173 173 self.fp = fp
174 174 self.offset = 0
175 175
176 176 def __getattr__(self, key):
177 177 return getattr(self.fp, key)
178 178
179 179 def write(self, s):
180 180 self.fp.write(s)
181 181 self.offset += len(s)
182 182
183 183 def tell(self):
184 184 return self.offset
185 185
186 186 class zipit(object):
187 187 '''write archive to zip file or stream. can write uncompressed,
188 188 or compressed with deflate.'''
189 189
190 190 def __init__(self, dest, mtime, compress=True):
191 191 if not isinstance(dest, str):
192 192 try:
193 193 dest.tell()
194 194 except (AttributeError, IOError):
195 195 dest = tellable(dest)
196 196 self.z = zipfile.ZipFile(dest, 'w',
197 197 compress and zipfile.ZIP_DEFLATED or
198 198 zipfile.ZIP_STORED)
199 199
200 200 # Python's zipfile module emits deprecation warnings if we try
201 201 # to store files with a date before 1980.
202 202 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
203 203 if mtime < epoch:
204 204 mtime = epoch
205 205
206 206 self.mtime = mtime
207 207 self.date_time = time.gmtime(mtime)[:6]
208 208
209 209 def addfile(self, name, mode, islink, data):
210 210 i = zipfile.ZipInfo(name, self.date_time)
211 211 i.compress_type = self.z.compression
212 212 # unzip will not honor unix file modes unless file creator is
213 213 # set to unix (id 3).
214 214 i.create_system = 3
215 215 ftype = _UNX_IFREG
216 216 if islink:
217 217 mode = 0777
218 218 ftype = _UNX_IFLNK
219 219 i.external_attr = (mode | ftype) << 16L
220 220 # add "extended-timestamp" extra block, because zip archives
221 221 # without this will be extracted with unexpected timestamp,
222 222 # if TZ is not configured as GMT
223 223 i.extra += struct.pack('<hhBl',
224 224 0x5455, # block type: "extended-timestamp"
225 225 1 + 4, # size of this block
226 226 1, # "modification time is present"
227 227 int(self.mtime)) # last modification (UTC)
228 228 self.z.writestr(i, data)
229 229
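For illustration, a quick byte-level check of the extra field packed above:
'<hhBl' packs 2+2+1+4 = 9 bytes, and block id 0x5455 shows up as 'UT' in the
resulting archive (mtime fixed to 0 here for a deterministic value).

    import struct

    extra = struct.pack('<hhBl', 0x5455, 1 + 4, 1, 0)
    assert extra == 'UT\x05\x00\x01\x00\x00\x00\x00'
    assert len(extra) == 9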
230 230 def done(self):
231 231 self.z.close()
232 232
233 233 class fileit(object):
234 234 '''write archive as files in directory.'''
235 235
236 236 def __init__(self, name, mtime):
237 237 self.basedir = name
238 238 self.opener = scmutil.opener(self.basedir)
239 239
240 240 def addfile(self, name, mode, islink, data):
241 241 if islink:
242 242 self.opener.symlink(data, name)
243 243 return
244 244 f = self.opener(name, "w", atomictemp=True)
245 245 f.write(data)
246 246 f.close()
247 247 destfile = os.path.join(self.basedir, name)
248 248 os.chmod(destfile, mode)
249 249
250 250 def done(self):
251 251 pass
252 252
253 253 archivers = {
254 254 'files': fileit,
255 255 'tar': tarit,
256 256 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
257 257 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
258 258 'uzip': lambda name, mtime: zipit(name, mtime, False),
259 259 'zip': zipit,
260 260 }
261 261
262 262 def archive(repo, dest, node, kind, decode=True, matchfn=None,
263 263 prefix='', mtime=None, subrepos=False):
264 264 '''create archive of repo as it was at node.
265 265
266 266 dest can be name of directory, name of archive file, or file
267 267 object to write archive to.
268 268
269 269 kind is type of archive to create.
270 270
271 271 decode tells whether to put files through decode filters from
272 272 hgrc.
273 273
274 274 matchfn is function to filter names of files to write to archive.
275 275
276 276 prefix is name of path to put before every archive member.'''
277 277
278 278 if kind == 'files':
279 279 if prefix:
280 280 raise util.Abort(_('cannot give prefix when archiving to files'))
281 281 else:
282 282 prefix = tidyprefix(dest, kind, prefix)
283 283
284 284 def write(name, mode, islink, getdata):
285 285 data = getdata()
286 286 if decode:
287 287 data = repo.wwritedata(name, data)
288 288 archiver.addfile(prefix + name, mode, islink, data)
289 289
290 290 if kind not in archivers:
291 291 raise util.Abort(_("unknown archive type '%s'") % kind)
292 292
293 293 ctx = repo[node]
294 294 archiver = archivers[kind](dest, mtime or ctx.date()[0])
295 295
296 296 if repo.ui.configbool("ui", "archivemeta", True):
297 297 name = '.hg_archival.txt'
298 298 if not matchfn or matchfn(name):
299 299 write(name, 0644, False, lambda: buildmetadata(ctx))
300 300
301 301 if matchfn:
302 302 files = [f for f in ctx.manifest().keys() if matchfn(f)]
303 303 else:
304 304 files = ctx.manifest().keys()
305 305 total = len(files)
306 306 if total:
307 307 files.sort()
308 308 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
309 309 for i, f in enumerate(files):
310 310 ff = ctx.flags(f)
311 311 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
312 312 repo.ui.progress(_('archiving'), i + 1, item=f,
313 313 unit=_('files'), total=total)
314 314 repo.ui.progress(_('archiving'), None)
315 315
316 316 if subrepos:
317 317 for subpath in sorted(ctx.substate):
318 sub = ctx.sub(subpath)
318 sub = ctx.workingsub(subpath)
319 319 submatch = matchmod.narrowmatcher(subpath, matchfn)
320 320 total += sub.archive(archiver, prefix, submatch)
321 321
322 322 if total == 0:
323 323 raise error.Abort(_('no files match the archive pattern'))
324 324
325 325 archiver.done()
326 326 return total
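For illustration, a hedged usage sketch of archive(), assuming '.' contains a
Mercurial repository; with the ctx.workingsub() change above, the same call
can also be pointed at the working directory, which is the wdir() case this
changeset enables.

    from mercurial import ui as uimod, hg, archival

    repo = hg.repository(uimod.ui(), '.')
    # archive the 'tip' changeset as a gzipped tarball, descending into subrepos
    archival.archive(repo, 'snapshot.tar.gz', 'tip', 'tgz', subrepos=True)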
@@ -1,1910 +1,1910 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import copy
9 9 import errno, os, re, posixpath, sys
10 10 import xml.dom.minidom
11 11 import stat, subprocess, tarfile
12 12 from i18n import _
13 13 import config, util, node, error, cmdutil, scmutil, match as matchmod
14 14 import phases
15 15 import pathutil
16 16 import exchange
17 17 hg = None
18 18 propertycache = util.propertycache
19 19
20 20 nullstate = ('', '', 'empty')
21 21
22 22 def _expandedabspath(path):
23 23 '''
24 24 get a path or url and if it is a path expand it and return an absolute path
25 25 '''
26 26 expandedpath = util.urllocalpath(util.expandpath(path))
27 27 u = util.url(expandedpath)
28 28 if not u.scheme:
29 29 path = util.normpath(os.path.abspath(u.path))
30 30 return path
31 31
32 32 def _getstorehashcachename(remotepath):
33 33 '''get a unique filename for the store hash cache of a remote repository'''
34 34 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
35 35
36 36 class SubrepoAbort(error.Abort):
37 37 """Exception class used to avoid handling a subrepo error more than once"""
38 38 def __init__(self, *args, **kw):
39 39 error.Abort.__init__(self, *args, **kw)
40 40 self.subrepo = kw.get('subrepo')
41 41 self.cause = kw.get('cause')
42 42
43 43 def annotatesubrepoerror(func):
44 44 def decoratedmethod(self, *args, **kargs):
45 45 try:
46 46 res = func(self, *args, **kargs)
47 47 except SubrepoAbort, ex:
48 48 # This exception has already been handled
49 49 raise ex
50 50 except error.Abort, ex:
51 51 subrepo = subrelpath(self)
52 52 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
53 53 # avoid handling this exception by raising a SubrepoAbort exception
54 54 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
55 55 cause=sys.exc_info())
56 56 return res
57 57 return decoratedmethod
58 58
59 59 def state(ctx, ui):
60 60 """return a state dict, mapping subrepo paths configured in .hgsub
61 61 to tuple: (source from .hgsub, revision from .hgsubstate, kind
62 62 (key in types dict))
63 63 """
64 64 p = config.config()
65 65 def read(f, sections=None, remap=None):
66 66 if f in ctx:
67 67 try:
68 68 data = ctx[f].data()
69 69 except IOError, err:
70 70 if err.errno != errno.ENOENT:
71 71 raise
72 72 # handle missing subrepo spec files as removed
73 73 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
74 74 util.pathto(ctx.repo().root, ctx.repo().getcwd(), f))
75 75 return
76 76 p.parse(f, data, sections, remap, read)
77 77 else:
78 78 repo = ctx.repo()
79 79 raise util.Abort(_("subrepo spec file \'%s\' not found") %
80 80 util.pathto(repo.root, repo.getcwd(), f))
81 81
82 82 if '.hgsub' in ctx:
83 83 read('.hgsub')
84 84
85 85 for path, src in ui.configitems('subpaths'):
86 86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
87 87
88 88 rev = {}
89 89 if '.hgsubstate' in ctx:
90 90 try:
91 91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
92 92 l = l.lstrip()
93 93 if not l:
94 94 continue
95 95 try:
96 96 revision, path = l.split(" ", 1)
97 97 except ValueError:
98 98 repo = ctx.repo()
99 99 raise util.Abort(_("invalid subrepository revision "
100 100 "specifier in \'%s\' line %d")
101 101 % (util.pathto(repo.root, repo.getcwd(),
102 102 '.hgsubstate'), (i + 1)))
103 103 rev[path] = revision
104 104 except IOError, err:
105 105 if err.errno != errno.ENOENT:
106 106 raise
107 107
108 108 def remap(src):
109 109 for pattern, repl in p.items('subpaths'):
110 110 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
111 111 # does a string decode.
112 112 repl = repl.encode('string-escape')
113 113 # However, we still want to allow back references to go
114 114 # through unharmed, so we turn r'\\1' into r'\1'. Again,
115 115 # extra escapes are needed because re.sub string decodes.
116 116 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
117 117 try:
118 118 src = re.sub(pattern, repl, src, 1)
119 119 except re.error, e:
120 120 raise util.Abort(_("bad subrepository pattern in %s: %s")
121 121 % (p.source('subpaths', pattern), e))
122 122 return src
123 123
124 124 state = {}
125 125 for path, src in p[''].items():
126 126 kind = 'hg'
127 127 if src.startswith('['):
128 128 if ']' not in src:
129 129 raise util.Abort(_('missing ] in subrepo source'))
130 130 kind, src = src.split(']', 1)
131 131 kind = kind[1:]
132 132 src = src.lstrip() # strip any extra whitespace after ']'
133 133
134 134 if not util.url(src).isabs():
135 135 parent = _abssource(ctx.repo(), abort=False)
136 136 if parent:
137 137 parent = util.url(parent)
138 138 parent.path = posixpath.join(parent.path or '', src)
139 139 parent.path = posixpath.normpath(parent.path)
140 140 joined = str(parent)
141 141 # Remap the full joined path and use it if it changes,
142 142 # else remap the original source.
143 143 remapped = remap(joined)
144 144 if remapped == joined:
145 145 src = remap(src)
146 146 else:
147 147 src = remapped
148 148
149 149 src = remap(src)
150 150 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
151 151
152 152 return state
153 153
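For illustration, how state() combines .hgsub and .hgsubstate when no
[subpaths] remapping is configured; the sources and hash below are made up.

    # .hgsub in the parent repository:
    #   nested = https://example.com/nested
    #   lib = [git]https://example.com/lib.git
    # .hgsubstate ('lib' has never been committed, so no revision is recorded):
    #   0123456789abcdef0123456789abcdef01234567 nested
    #
    # state(ctx, ui) then maps each path to (source, revision, kind):
    expected = {
        'nested': ('https://example.com/nested',
                   '0123456789abcdef0123456789abcdef01234567', 'hg'),
        'lib': ('https://example.com/lib.git', '', 'git'),
    }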
154 154 def writestate(repo, state):
155 155 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
156 156 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
157 157 if state[s][1] != nullstate[1]]
158 158 repo.wwrite('.hgsubstate', ''.join(lines), '')
159 159
160 160 def submerge(repo, wctx, mctx, actx, overwrite):
161 161 """delegated from merge.applyupdates: merging of .hgsubstate file
162 162 in working context, merging context and ancestor context"""
163 163 if mctx == actx: # backwards?
164 164 actx = wctx.p1()
165 165 s1 = wctx.substate
166 166 s2 = mctx.substate
167 167 sa = actx.substate
168 168 sm = {}
169 169
170 170 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
171 171
172 172 def debug(s, msg, r=""):
173 173 if r:
174 174 r = "%s:%s:%s" % r
175 175 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
176 176
177 177 for s, l in sorted(s1.iteritems()):
178 178 a = sa.get(s, nullstate)
179 179 ld = l # local state with possible dirty flag for compares
180 180 if wctx.sub(s).dirty():
181 181 ld = (l[0], l[1] + "+")
182 182 if wctx == actx: # overwrite
183 183 a = ld
184 184
185 185 if s in s2:
186 186 r = s2[s]
187 187 if ld == r or r == a: # no change or local is newer
188 188 sm[s] = l
189 189 continue
190 190 elif ld == a: # other side changed
191 191 debug(s, "other changed, get", r)
192 192 wctx.sub(s).get(r, overwrite)
193 193 sm[s] = r
194 194 elif ld[0] != r[0]: # sources differ
195 195 if repo.ui.promptchoice(
196 196 _(' subrepository sources for %s differ\n'
197 197 'use (l)ocal source (%s) or (r)emote source (%s)?'
198 198 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
199 199 debug(s, "prompt changed, get", r)
200 200 wctx.sub(s).get(r, overwrite)
201 201 sm[s] = r
202 202 elif ld[1] == a[1]: # local side is unchanged
203 203 debug(s, "other side changed, get", r)
204 204 wctx.sub(s).get(r, overwrite)
205 205 sm[s] = r
206 206 else:
207 207 debug(s, "both sides changed")
208 208 srepo = wctx.sub(s)
209 209 option = repo.ui.promptchoice(
210 210 _(' subrepository %s diverged (local revision: %s, '
211 211 'remote revision: %s)\n'
212 212 '(M)erge, keep (l)ocal or keep (r)emote?'
213 213 '$$ &Merge $$ &Local $$ &Remote')
214 214 % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
215 215 if option == 0:
216 216 wctx.sub(s).merge(r)
217 217 sm[s] = l
218 218 debug(s, "merge with", r)
219 219 elif option == 1:
220 220 sm[s] = l
221 221 debug(s, "keep local subrepo revision", l)
222 222 else:
223 223 wctx.sub(s).get(r, overwrite)
224 224 sm[s] = r
225 225 debug(s, "get remote subrepo revision", r)
226 226 elif ld == a: # remote removed, local unchanged
227 227 debug(s, "remote removed, remove")
228 228 wctx.sub(s).remove()
229 229 elif a == nullstate: # not present in remote or ancestor
230 230 debug(s, "local added, keep")
231 231 sm[s] = l
232 232 continue
233 233 else:
234 234 if repo.ui.promptchoice(
235 235 _(' local changed subrepository %s which remote removed\n'
236 236 'use (c)hanged version or (d)elete?'
237 237 '$$ &Changed $$ &Delete') % s, 0):
238 238 debug(s, "prompt remove")
239 239 wctx.sub(s).remove()
240 240
241 241 for s, r in sorted(s2.items()):
242 242 if s in s1:
243 243 continue
244 244 elif s not in sa:
245 245 debug(s, "remote added, get", r)
246 246 mctx.sub(s).get(r)
247 247 sm[s] = r
248 248 elif r != sa[s]:
249 249 if repo.ui.promptchoice(
250 250 _(' remote changed subrepository %s which local removed\n'
251 251 'use (c)hanged version or (d)elete?'
252 252 '$$ &Changed $$ &Delete') % s, 0) == 0:
253 253 debug(s, "prompt recreate", r)
254 254 mctx.sub(s).get(r)
255 255 sm[s] = r
256 256
257 257 # record merged .hgsubstate
258 258 writestate(repo, sm)
259 259 return sm
260 260
261 261 def _updateprompt(ui, sub, dirty, local, remote):
262 262 if dirty:
263 263 msg = (_(' subrepository sources for %s differ\n'
264 264 'use (l)ocal source (%s) or (r)emote source (%s)?'
265 265 '$$ &Local $$ &Remote')
266 266 % (subrelpath(sub), local, remote))
267 267 else:
268 268 msg = (_(' subrepository sources for %s differ (in checked out '
269 269 'version)\n'
270 270 'use (l)ocal source (%s) or (r)emote source (%s)?'
271 271 '$$ &Local $$ &Remote')
272 272 % (subrelpath(sub), local, remote))
273 273 return ui.promptchoice(msg, 0)
274 274
275 275 def reporelpath(repo):
276 276 """return path to this (sub)repo as seen from outermost repo"""
277 277 parent = repo
278 278 while util.safehasattr(parent, '_subparent'):
279 279 parent = parent._subparent
280 280 return repo.root[len(pathutil.normasprefix(parent.root)):]
281 281
282 282 def subrelpath(sub):
283 283 """return path to this subrepo as seen from outermost repo"""
284 284 return sub._relpath
285 285
286 286 def _abssource(repo, push=False, abort=True):
287 287 """return pull/push path of repo - either based on parent repo .hgsub info
288 288 or on the top repo config. Abort or return None if no source found."""
289 289 if util.safehasattr(repo, '_subparent'):
290 290 source = util.url(repo._subsource)
291 291 if source.isabs():
292 292 return str(source)
293 293 source.path = posixpath.normpath(source.path)
294 294 parent = _abssource(repo._subparent, push, abort=False)
295 295 if parent:
296 296 parent = util.url(util.pconvert(parent))
297 297 parent.path = posixpath.join(parent.path or '', source.path)
298 298 parent.path = posixpath.normpath(parent.path)
299 299 return str(parent)
300 300 else: # recursion reached top repo
301 301 if util.safehasattr(repo, '_subtoppath'):
302 302 return repo._subtoppath
303 303 if push and repo.ui.config('paths', 'default-push'):
304 304 return repo.ui.config('paths', 'default-push')
305 305 if repo.ui.config('paths', 'default'):
306 306 return repo.ui.config('paths', 'default')
307 307 if repo.shared():
308 308 # chop off the .hg component to get the default path form
309 309 return os.path.dirname(repo.sharedpath)
310 310 if abort:
311 311 raise util.Abort(_("default path for subrepository not found"))
312 312
313 313 def _sanitize(ui, vfs, ignore):
314 314 for dirname, dirs, names in vfs.walk():
315 315 for i, d in enumerate(dirs):
316 316 if d.lower() == ignore:
317 317 del dirs[i]
318 318 break
319 319 if os.path.basename(dirname).lower() != '.hg':
320 320 continue
321 321 for f in names:
322 322 if f.lower() == 'hgrc':
323 323 ui.warn(_("warning: removing potentially hostile 'hgrc' "
324 324 "in '%s'\n") % vfs.join(dirname))
325 325 vfs.unlink(vfs.reljoin(dirname, f))
326 326
327 327 def subrepo(ctx, path, allowwdir=False):
328 328 """return instance of the right subrepo class for subrepo in path"""
329 329 # subrepo inherently violates our import layering rules
330 330 # because it wants to make repo objects from deep inside the stack
331 331 # so we manually delay the circular imports to not break
332 332 # scripts that don't use our demand-loading
333 333 global hg
334 334 import hg as h
335 335 hg = h
336 336
337 337 pathutil.pathauditor(ctx.repo().root)(path)
338 338 state = ctx.substate[path]
339 339 if state[2] not in types:
340 340 raise util.Abort(_('unknown subrepo type %s') % state[2])
341 341 if allowwdir:
342 342 state = (state[0], ctx.subrev(path), state[2])
343 343 return types[state[2]](ctx, path, state[:2])
344 344
345 345 def nullsubrepo(ctx, path, pctx):
346 346 """return an empty subrepo in pctx for the extant subrepo in ctx"""
347 347 # subrepo inherently violates our import layering rules
348 348 # because it wants to make repo objects from deep inside the stack
349 349 # so we manually delay the circular imports to not break
350 350 # scripts that don't use our demand-loading
351 351 global hg
352 352 import hg as h
353 353 hg = h
354 354
355 355 pathutil.pathauditor(ctx.repo().root)(path)
356 356 state = ctx.substate[path]
357 357 if state[2] not in types:
358 358 raise util.Abort(_('unknown subrepo type %s') % state[2])
359 359 subrev = ''
360 360 if state[2] == 'hg':
361 361 subrev = "0" * 40
362 362 return types[state[2]](pctx, path, (state[0], subrev))
363 363
364 364 def newcommitphase(ui, ctx):
365 365 commitphase = phases.newcommitphase(ui)
366 366 substate = getattr(ctx, "substate", None)
367 367 if not substate:
368 368 return commitphase
369 369 check = ui.config('phases', 'checksubrepos', 'follow')
370 370 if check not in ('ignore', 'follow', 'abort'):
371 371 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
372 372 % (check))
373 373 if check == 'ignore':
374 374 return commitphase
375 375 maxphase = phases.public
376 376 maxsub = None
377 377 for s in sorted(substate):
378 378 sub = ctx.sub(s)
379 379 subphase = sub.phase(substate[s][1])
380 380 if maxphase < subphase:
381 381 maxphase = subphase
382 382 maxsub = s
383 383 if commitphase < maxphase:
384 384 if check == 'abort':
385 385 raise util.Abort(_("can't commit in %s phase"
386 386 " conflicting %s from subrepository %s") %
387 387 (phases.phasenames[commitphase],
388 388 phases.phasenames[maxphase], maxsub))
389 389 ui.warn(_("warning: changes are committed in"
390 390 " %s phase from subrepository %s\n") %
391 391 (phases.phasenames[maxphase], maxsub))
392 392 return maxphase
393 393 return commitphase
394 394
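For illustration, the check above is driven by an ordinary hgrc setting; a
minimal example follows, 'follow' being the default read a few lines up.

    [phases]
    # accepted values: follow (default), ignore, abort
    checksubrepos = abort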
395 395 # subrepo classes need to implement the following abstract class:
396 396
397 397 class abstractsubrepo(object):
398 398
399 399 def __init__(self, ctx, path):
400 400 """Initialize abstractsubrepo part
401 401
402 402 ``ctx`` is the context referring to this subrepository in the
403 403 parent repository.
404 404
405 405 ``path`` is the path to this subrepository as seen from
406 406 innermost repository.
407 407 """
408 408 self.ui = ctx.repo().ui
409 409 self._ctx = ctx
410 410 self._path = path
411 411
412 412 def storeclean(self, path):
413 413 """
414 414 returns true if the repository has not changed since it was last
415 415 cloned from or pushed to a given repository.
416 416 """
417 417 return False
418 418
419 419 def dirty(self, ignoreupdate=False):
420 420 """returns true if the dirstate of the subrepo is dirty or does not
421 421 match current stored state. If ignoreupdate is true, only check
422 422 whether the subrepo has uncommitted changes in its dirstate.
423 423 """
424 424 raise NotImplementedError
425 425
426 426 def dirtyreason(self, ignoreupdate=False):
427 427 """return reason string if it is ``dirty()``
428 428
429 429 The returned string should have enough information for the
430 430 message of the exception.
431 431
432 432 Otherwise, this returns None.
433 433 """
434 434 if self.dirty(ignoreupdate=ignoreupdate):
435 435 return _("uncommitted changes in subrepository '%s'"
436 436 ) % subrelpath(self)
437 437
438 438 def bailifchanged(self, ignoreupdate=False):
439 439 """raise Abort if subrepository is ``dirty()``
440 440 """
441 441 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
442 442 if dirtyreason:
443 443 raise util.Abort(dirtyreason)
444 444
445 445 def basestate(self):
446 446 """current working directory base state, disregarding .hgsubstate
447 447 state and working directory modifications"""
448 448 raise NotImplementedError
449 449
450 450 def checknested(self, path):
451 451 """check if path is a subrepository within this repository"""
452 452 return False
453 453
454 454 def commit(self, text, user, date):
455 455 """commit the current changes to the subrepo with the given
456 456 log message. Use given user and date if possible. Return the
457 457 new state of the subrepo.
458 458 """
459 459 raise NotImplementedError
460 460
461 461 def phase(self, state):
462 462 """returns phase of specified state in the subrepository.
463 463 """
464 464 return phases.public
465 465
466 466 def remove(self):
467 467 """remove the subrepo
468 468
469 469 (should verify the dirstate is not dirty first)
470 470 """
471 471 raise NotImplementedError
472 472
473 473 def get(self, state, overwrite=False):
474 474 """run whatever commands are needed to put the subrepo into
475 475 this state
476 476 """
477 477 raise NotImplementedError
478 478
479 479 def merge(self, state):
480 480 """merge currently-saved state with the new state."""
481 481 raise NotImplementedError
482 482
483 483 def push(self, opts):
484 484 """perform whatever action is analogous to 'hg push'
485 485
486 486 This may be a no-op on some systems.
487 487 """
488 488 raise NotImplementedError
489 489
490 490 def add(self, ui, match, prefix, explicitonly, **opts):
491 491 return []
492 492
493 493 def addremove(self, matcher, prefix, opts, dry_run, similarity):
494 494 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
495 495 return 1
496 496
497 497 def cat(self, match, prefix, **opts):
498 498 return 1
499 499
500 500 def status(self, rev2, **opts):
501 501 return scmutil.status([], [], [], [], [], [], [])
502 502
503 503 def diff(self, ui, diffopts, node2, match, prefix, **opts):
504 504 pass
505 505
506 506 def outgoing(self, ui, dest, opts):
507 507 return 1
508 508
509 509 def incoming(self, ui, source, opts):
510 510 return 1
511 511
512 512 def files(self):
513 513 """return filename iterator"""
514 514 raise NotImplementedError
515 515
516 516 def filedata(self, name):
517 517 """return file data"""
518 518 raise NotImplementedError
519 519
520 520 def fileflags(self, name):
521 521 """return file flags"""
522 522 return ''
523 523
524 524 def getfileset(self, expr):
525 525 """Resolve the fileset expression for this repo"""
526 526 return set()
527 527
528 528 def printfiles(self, ui, m, fm, fmt, subrepos):
529 529 """handle the files command for this subrepo"""
530 530 return 1
531 531
532 532 def archive(self, archiver, prefix, match=None):
533 533 if match is not None:
534 534 files = [f for f in self.files() if match(f)]
535 535 else:
536 536 files = self.files()
537 537 total = len(files)
538 538 relpath = subrelpath(self)
539 539 self.ui.progress(_('archiving (%s)') % relpath, 0,
540 540 unit=_('files'), total=total)
541 541 for i, name in enumerate(files):
542 542 flags = self.fileflags(name)
543 543 mode = 'x' in flags and 0755 or 0644
544 544 symlink = 'l' in flags
545 545 archiver.addfile(prefix + self._path + '/' + name,
546 546 mode, symlink, self.filedata(name))
547 547 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
548 548 unit=_('files'), total=total)
549 549 self.ui.progress(_('archiving (%s)') % relpath, None)
550 550 return total
551 551
552 552 def walk(self, match):
553 553 '''
554 554 walk recursively through the directory tree, finding all files
555 555 matched by the match function
556 556 '''
557 557 pass
558 558
559 559 def forget(self, match, prefix):
560 560 return ([], [])
561 561
562 562 def removefiles(self, matcher, prefix, after, force, subrepos):
563 563 """remove the matched files from the subrepository and the filesystem,
564 564 possibly by force and/or after the file has been removed from the
565 565 filesystem. Return 0 on success, 1 on any warning.
566 566 """
567 567 return 1
568 568
569 569 def revert(self, substate, *pats, **opts):
570 570 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
571 571 % (substate[0], substate[2]))
572 572 return []
573 573
574 574 def shortid(self, revid):
575 575 return revid
576 576
577 577 def verify(self):
578 578 '''verify the integrity of the repository. Return 0 on success or
579 579 warning, 1 on any error.
580 580 '''
581 581 return 0
582 582
583 583 @propertycache
584 584 def wvfs(self):
585 585 """return vfs to access the working directory of this subrepository
586 586 """
587 587 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
588 588
589 589 @propertycache
590 590 def _relpath(self):
591 591 """return path to this subrepository as seen from outermost repository
592 592 """
593 593 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
594 594
595 595 class hgsubrepo(abstractsubrepo):
596 596 def __init__(self, ctx, path, state):
597 597 super(hgsubrepo, self).__init__(ctx, path)
598 598 self._state = state
599 599 r = ctx.repo()
600 600 root = r.wjoin(path)
601 601 create = not r.wvfs.exists('%s/.hg' % path)
602 602 self._repo = hg.repository(r.baseui, root, create=create)
603 603
604 604 # Propagate the parent's --hidden option
605 605 if r is r.unfiltered():
606 606 self._repo = self._repo.unfiltered()
607 607
608 608 self.ui = self._repo.ui
609 609 for s, k in [('ui', 'commitsubrepos')]:
610 610 v = r.ui.config(s, k)
611 611 if v:
612 612 self.ui.setconfig(s, k, v, 'subrepo')
613 613 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
614 614 self._initrepo(r, state[0], create)
615 615
616 616 def storeclean(self, path):
617 617 lock = self._repo.lock()
618 618 try:
619 619 return self._storeclean(path)
620 620 finally:
621 621 lock.release()
622 622
623 623 def _storeclean(self, path):
624 624 clean = True
625 625 itercache = self._calcstorehash(path)
626 626 for filehash in self._readstorehashcache(path):
627 627 if filehash != next(itercache, None):
628 628 clean = False
629 629 break
630 630 if clean:
631 631 # if not empty:
632 632 # the cached and current pull states have a different size
633 633 clean = next(itercache, None) is None
634 634 return clean
635 635
636 636 def _calcstorehash(self, remotepath):
637 637 '''calculate a unique "store hash"
638 638
639 639 This method is used to detect when there are changes that may
640 640 require a push to a given remote path.'''
641 641 # sort the files that will be hashed in increasing (likely) file size
642 642 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
643 643 yield '# %s\n' % _expandedabspath(remotepath)
644 644 vfs = self._repo.vfs
645 645 for relname in filelist:
646 646 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
647 647 yield '%s = %s\n' % (relname, filehash)
648 648
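For illustration, the generator above yields one header line plus one
'name = sha1' line per store file, and _cachestorehash() below writes them to
.hg/cache/storehash/<sha1(remotepath)[:12]>. In this sketch the remote path
and the changelog hash are made up; the repeated value is the sha1 of an empty
string, which is what a missing file hashes to because vfs.tryread() returns ''.

    # /home/user/remote-clone
    bookmarks = da39a3ee5e6b4b0d3255bfef95601890afd80709
    store/phaseroots = da39a3ee5e6b4b0d3255bfef95601890afd80709
    store/00changelog.i = 5f3a0b7c9d1e2f4a6b8c0d2e4f6a8b0c2d4e6f8a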
649 649 @propertycache
650 650 def _cachestorehashvfs(self):
651 651 return scmutil.vfs(self._repo.join('cache/storehash'))
652 652
653 653 def _readstorehashcache(self, remotepath):
654 654 '''read the store hash cache for a given remote repository'''
655 655 cachefile = _getstorehashcachename(remotepath)
656 656 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
657 657
658 658 def _cachestorehash(self, remotepath):
659 659 '''cache the current store hash
660 660
661 661 Each remote repo requires its own store hash cache, because a subrepo
662 662 store may be "clean" versus a given remote repo, but not versus another
663 663 '''
664 664 cachefile = _getstorehashcachename(remotepath)
665 665 lock = self._repo.lock()
666 666 try:
667 667 storehash = list(self._calcstorehash(remotepath))
668 668 vfs = self._cachestorehashvfs
669 669 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
670 670 finally:
671 671 lock.release()
672 672
673 673 def _getctx(self):
674 674 '''fetch the context for this subrepo revision, possibly a workingctx
675 675 '''
676 676 if self._ctx.rev() is None:
677 677 return self._repo[None] # workingctx if parent is workingctx
678 678 else:
679 679 rev = self._state[1]
680 680 return self._repo[rev]
681 681
682 682 @annotatesubrepoerror
683 683 def _initrepo(self, parentrepo, source, create):
684 684 self._repo._subparent = parentrepo
685 685 self._repo._subsource = source
686 686
687 687 if create:
688 688 lines = ['[paths]\n']
689 689
690 690 def addpathconfig(key, value):
691 691 if value:
692 692 lines.append('%s = %s\n' % (key, value))
693 693 self.ui.setconfig('paths', key, value, 'subrepo')
694 694
695 695 defpath = _abssource(self._repo, abort=False)
696 696 defpushpath = _abssource(self._repo, True, abort=False)
697 697 addpathconfig('default', defpath)
698 698 if defpath != defpushpath:
699 699 addpathconfig('default-push', defpushpath)
700 700
701 701 fp = self._repo.vfs("hgrc", "w", text=True)
702 702 try:
703 703 fp.write(''.join(lines))
704 704 finally:
705 705 fp.close()
706 706
707 707 @annotatesubrepoerror
708 708 def add(self, ui, match, prefix, explicitonly, **opts):
709 709 return cmdutil.add(ui, self._repo, match,
710 710 self.wvfs.reljoin(prefix, self._path),
711 711 explicitonly, **opts)
712 712
713 713 @annotatesubrepoerror
714 714 def addremove(self, m, prefix, opts, dry_run, similarity):
715 715 # In the same way as sub directories are processed, once in a subrepo,
716 716 # always enter any of its subrepos. Don't corrupt the options that will
717 717 # be used to process sibling subrepos, however.
718 718 opts = copy.copy(opts)
719 719 opts['subrepos'] = True
720 720 return scmutil.addremove(self._repo, m,
721 721 self.wvfs.reljoin(prefix, self._path), opts,
722 722 dry_run, similarity)
723 723
724 724 @annotatesubrepoerror
725 725 def cat(self, match, prefix, **opts):
726 726 rev = self._state[1]
727 727 ctx = self._repo[rev]
728 728 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
729 729
730 730 @annotatesubrepoerror
731 731 def status(self, rev2, **opts):
732 732 try:
733 733 rev1 = self._state[1]
734 734 ctx1 = self._repo[rev1]
735 735 ctx2 = self._repo[rev2]
736 736 return self._repo.status(ctx1, ctx2, **opts)
737 737 except error.RepoLookupError, inst:
738 738 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
739 739 % (inst, subrelpath(self)))
740 740 return scmutil.status([], [], [], [], [], [], [])
741 741
742 742 @annotatesubrepoerror
743 743 def diff(self, ui, diffopts, node2, match, prefix, **opts):
744 744 try:
745 745 node1 = node.bin(self._state[1])
746 746 # We currently expect node2 to come from substate and be
747 747 # in hex format
748 748 if node2 is not None:
749 749 node2 = node.bin(node2)
750 750 cmdutil.diffordiffstat(ui, self._repo, diffopts,
751 751 node1, node2, match,
752 752 prefix=posixpath.join(prefix, self._path),
753 753 listsubrepos=True, **opts)
754 754 except error.RepoLookupError, inst:
755 755 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
756 756 % (inst, subrelpath(self)))
757 757
758 758 @annotatesubrepoerror
759 759 def archive(self, archiver, prefix, match=None):
760 760 self._get(self._state + ('hg',))
761 761 total = abstractsubrepo.archive(self, archiver, prefix, match)
762 762 rev = self._state[1]
763 763 ctx = self._repo[rev]
764 764 for subpath in ctx.substate:
765 s = subrepo(ctx, subpath)
765 s = subrepo(ctx, subpath, True)
766 766 submatch = matchmod.narrowmatcher(subpath, match)
767 767 total += s.archive(archiver, prefix + self._path + '/', submatch)
768 768 return total
769 769
770 770 @annotatesubrepoerror
771 771 def dirty(self, ignoreupdate=False):
772 772 r = self._state[1]
773 773 if r == '' and not ignoreupdate: # no state recorded
774 774 return True
775 775 w = self._repo[None]
776 776 if r != w.p1().hex() and not ignoreupdate:
777 777 # different version checked out
778 778 return True
779 779 return w.dirty() # working directory changed
780 780
781 781 def basestate(self):
782 782 return self._repo['.'].hex()
783 783
784 784 def checknested(self, path):
785 785 return self._repo._checknested(self._repo.wjoin(path))
786 786
787 787 @annotatesubrepoerror
788 788 def commit(self, text, user, date):
789 789 # don't bother committing in the subrepo if it's only been
790 790 # updated
791 791 if not self.dirty(True):
792 792 return self._repo['.'].hex()
793 793 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
794 794 n = self._repo.commit(text, user, date)
795 795 if not n:
796 796 return self._repo['.'].hex() # different version checked out
797 797 return node.hex(n)
798 798
799 799 @annotatesubrepoerror
800 800 def phase(self, state):
801 801 return self._repo[state].phase()
802 802
803 803 @annotatesubrepoerror
804 804 def remove(self):
805 805 # we can't fully delete the repository as it may contain
806 806 # local-only history
807 807 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
808 808 hg.clean(self._repo, node.nullid, False)
809 809
810 810 def _get(self, state):
811 811 source, revision, kind = state
812 812 if revision in self._repo.unfiltered():
813 813 return True
814 814 self._repo._subsource = source
815 815 srcurl = _abssource(self._repo)
816 816 other = hg.peer(self._repo, {}, srcurl)
817 817 if len(self._repo) == 0:
818 818 self.ui.status(_('cloning subrepo %s from %s\n')
819 819 % (subrelpath(self), srcurl))
820 820 parentrepo = self._repo._subparent
821 821 # use self._repo.vfs instead of self.wvfs to remove .hg only
822 822 self._repo.vfs.rmtree()
823 823 other, cloned = hg.clone(self._repo._subparent.baseui, {},
824 824 other, self._repo.root,
825 825 update=False)
826 826 self._repo = cloned.local()
827 827 self._initrepo(parentrepo, source, create=True)
828 828 self._cachestorehash(srcurl)
829 829 else:
830 830 self.ui.status(_('pulling subrepo %s from %s\n')
831 831 % (subrelpath(self), srcurl))
832 832 cleansub = self.storeclean(srcurl)
833 833 exchange.pull(self._repo, other)
834 834 if cleansub:
835 835 # keep the repo clean after pull
836 836 self._cachestorehash(srcurl)
837 837 return False
838 838
839 839 @annotatesubrepoerror
840 840 def get(self, state, overwrite=False):
841 841 inrepo = self._get(state)
842 842 source, revision, kind = state
843 843 repo = self._repo
844 844 repo.ui.debug("getting subrepo %s\n" % self._path)
845 845 if inrepo:
846 846 urepo = repo.unfiltered()
847 847 ctx = urepo[revision]
848 848 if ctx.hidden():
849 849 urepo.ui.warn(
850 850 _('revision %s in subrepo %s is hidden\n') \
851 851 % (revision[0:12], self._path))
852 852 repo = urepo
853 853 hg.updaterepo(repo, revision, overwrite)
854 854
855 855 @annotatesubrepoerror
856 856 def merge(self, state):
857 857 self._get(state)
858 858 cur = self._repo['.']
859 859 dst = self._repo[state[1]]
860 860 anc = dst.ancestor(cur)
861 861
862 862 def mergefunc():
863 863 if anc == cur and dst.branch() == cur.branch():
864 864 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
865 865 hg.update(self._repo, state[1])
866 866 elif anc == dst:
867 867 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
868 868 else:
869 869 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
870 870 hg.merge(self._repo, state[1], remind=False)
871 871
872 872 wctx = self._repo[None]
873 873 if self.dirty():
874 874 if anc != dst:
875 875 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
876 876 mergefunc()
877 877 else:
878 878 mergefunc()
879 879 else:
880 880 mergefunc()
881 881
882 882 @annotatesubrepoerror
883 883 def push(self, opts):
884 884 force = opts.get('force')
885 885 newbranch = opts.get('new_branch')
886 886 ssh = opts.get('ssh')
887 887
888 888 # push subrepos depth-first for coherent ordering
889 889 c = self._repo['']
890 890 subs = c.substate # only repos that are committed
891 891 for s in sorted(subs):
892 892 if c.sub(s).push(opts) == 0:
893 893 return False
894 894
895 895 dsturl = _abssource(self._repo, True)
896 896 if not force:
897 897 if self.storeclean(dsturl):
898 898 self.ui.status(
899 899 _('no changes made to subrepo %s since last push to %s\n')
900 900 % (subrelpath(self), dsturl))
901 901 return None
902 902 self.ui.status(_('pushing subrepo %s to %s\n') %
903 903 (subrelpath(self), dsturl))
904 904 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
905 905 res = exchange.push(self._repo, other, force, newbranch=newbranch)
906 906
907 907 # the repo is now clean
908 908 self._cachestorehash(dsturl)
909 909 return res.cgresult
910 910
911 911 @annotatesubrepoerror
912 912 def outgoing(self, ui, dest, opts):
913 913 if 'rev' in opts or 'branch' in opts:
914 914 opts = copy.copy(opts)
915 915 opts.pop('rev', None)
916 916 opts.pop('branch', None)
917 917 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
918 918
919 919 @annotatesubrepoerror
920 920 def incoming(self, ui, source, opts):
921 921 if 'rev' in opts or 'branch' in opts:
922 922 opts = copy.copy(opts)
923 923 opts.pop('rev', None)
924 924 opts.pop('branch', None)
925 925 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
926 926
927 927 @annotatesubrepoerror
928 928 def files(self):
929 929 rev = self._state[1]
930 930 ctx = self._repo[rev]
931 931 return ctx.manifest().keys()
932 932
933 933 def filedata(self, name):
934 934 rev = self._state[1]
935 935 return self._repo[rev][name].data()
936 936
937 937 def fileflags(self, name):
938 938 rev = self._state[1]
939 939 ctx = self._repo[rev]
940 940 return ctx.flags(name)
941 941
942 942 @annotatesubrepoerror
943 943 def printfiles(self, ui, m, fm, fmt, subrepos):
944 944 # If the parent context is a workingctx, use the workingctx here for
945 945 # consistency.
946 946 if self._ctx.rev() is None:
947 947 ctx = self._repo[None]
948 948 else:
949 949 rev = self._state[1]
950 950 ctx = self._repo[rev]
951 951 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
952 952
953 953 @annotatesubrepoerror
954 954 def getfileset(self, expr):
955 955 if self._ctx.rev() is None:
956 956 ctx = self._repo[None]
957 957 else:
958 958 rev = self._state[1]
959 959 ctx = self._repo[rev]
960 960
961 961 files = ctx.getfileset(expr)
962 962
963 963 for subpath in ctx.substate:
964 964 sub = ctx.sub(subpath)
965 965
966 966 try:
967 967 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
968 968 except error.LookupError:
969 969 self.ui.status(_("skipping missing subrepository: %s\n")
970 970 % self.wvfs.reljoin(reporelpath(self), subpath))
971 971 return files
972 972
973 973 def walk(self, match):
974 974 ctx = self._repo[None]
975 975 return ctx.walk(match)
976 976
977 977 @annotatesubrepoerror
978 978 def forget(self, match, prefix):
979 979 return cmdutil.forget(self.ui, self._repo, match,
980 980 self.wvfs.reljoin(prefix, self._path), True)
981 981
982 982 @annotatesubrepoerror
983 983 def removefiles(self, matcher, prefix, after, force, subrepos):
984 984 return cmdutil.remove(self.ui, self._repo, matcher,
985 985 self.wvfs.reljoin(prefix, self._path),
986 986 after, force, subrepos)
987 987
988 988 @annotatesubrepoerror
989 989 def revert(self, substate, *pats, **opts):
990 990 # reverting a subrepo is a 2 step process:
991 991 # 1. if the no_backup is not set, revert all modified
992 992 # files inside the subrepo
993 993 # 2. update the subrepo to the revision specified in
994 994 # the corresponding substate dictionary
995 995 self.ui.status(_('reverting subrepo %s\n') % substate[0])
996 996 if not opts.get('no_backup'):
997 997 # Revert all files on the subrepo, creating backups
998 998 # Note that this will not recursively revert subrepos
999 999 # We could do it if there was a set:subrepos() predicate
1000 1000 opts = opts.copy()
1001 1001 opts['date'] = None
1002 1002 opts['rev'] = substate[1]
1003 1003
1004 1004 self.filerevert(*pats, **opts)
1005 1005
1006 1006 # Update the repo to the revision specified in the given substate
1007 1007 if not opts.get('dry_run'):
1008 1008 self.get(substate, overwrite=True)
1009 1009
1010 1010 def filerevert(self, *pats, **opts):
1011 1011 ctx = self._repo[opts['rev']]
1012 1012 parents = self._repo.dirstate.parents()
1013 1013 if opts.get('all'):
1014 1014 pats = ['set:modified()']
1015 1015 else:
1016 1016 pats = []
1017 1017 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1018 1018
1019 1019 def shortid(self, revid):
1020 1020 return revid[:12]
1021 1021
1022 1022 def verify(self):
1023 1023 try:
1024 1024 rev = self._state[1]
1025 1025 ctx = self._repo.unfiltered()[rev]
1026 1026 if ctx.hidden():
1027 1027 # Since hidden revisions aren't pushed/pulled, it seems worth an
1028 1028 # explicit warning.
1029 1029 ui = self._repo.ui
1030 1030 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1031 1031 (self._relpath, node.short(self._ctx.node())))
1032 1032 return 0
1033 1033 except error.RepoLookupError:
1034 1034 # A missing subrepo revision may be a case of needing to pull it, so
1035 1035 # don't treat this as an error.
1036 1036 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1037 1037 (self._relpath, node.short(self._ctx.node())))
1038 1038 return 0
1039 1039
1040 1040 @propertycache
1041 1041 def wvfs(self):
1042 1042 """return own wvfs for efficiency and consitency
1043 1043 """
1044 1044 return self._repo.wvfs
1045 1045
1046 1046 @propertycache
1047 1047 def _relpath(self):
1048 1048 """return path to this subrepository as seen from outermost repository
1049 1049 """
1050 1050 # Keep consistent dir separators by avoiding vfs.join(self._path)
1051 1051 return reporelpath(self._repo)
1052 1052
1053 1053 class svnsubrepo(abstractsubrepo):
1054 1054 def __init__(self, ctx, path, state):
1055 1055 super(svnsubrepo, self).__init__(ctx, path)
1056 1056 self._state = state
1057 1057 self._exe = util.findexe('svn')
1058 1058 if not self._exe:
1059 1059 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
1060 1060 % self._path)
1061 1061
1062 1062 def _svncommand(self, commands, filename='', failok=False):
1063 1063 cmd = [self._exe]
1064 1064 extrakw = {}
1065 1065 if not self.ui.interactive():
1066 1066 # Making stdin be a pipe should prevent svn from behaving
1067 1067 # interactively even if we can't pass --non-interactive.
1068 1068 extrakw['stdin'] = subprocess.PIPE
1069 1069 # Starting in svn 1.5 --non-interactive is a global flag
1070 1070 # instead of being per-command, but we need to support 1.4 so
1071 1071 # we have to be intelligent about what commands take
1072 1072 # --non-interactive.
1073 1073 if commands[0] in ('update', 'checkout', 'commit'):
1074 1074 cmd.append('--non-interactive')
1075 1075 cmd.extend(commands)
1076 1076 if filename is not None:
1077 1077 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1078 1078 self._path, filename)
1079 1079 cmd.append(path)
1080 1080 env = dict(os.environ)
1081 1081 # Avoid localized output, preserve current locale for everything else.
1082 1082 lc_all = env.get('LC_ALL')
1083 1083 if lc_all:
1084 1084 env['LANG'] = lc_all
1085 1085 del env['LC_ALL']
1086 1086 env['LC_MESSAGES'] = 'C'
1087 1087 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1088 1088 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1089 1089 universal_newlines=True, env=env, **extrakw)
1090 1090 stdout, stderr = p.communicate()
1091 1091 stderr = stderr.strip()
1092 1092 if not failok:
1093 1093 if p.returncode:
1094 1094 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
1095 1095 if stderr:
1096 1096 self.ui.warn(stderr + '\n')
1097 1097 return stdout, stderr
1098 1098
1099 1099 @propertycache
1100 1100 def _svnversion(self):
1101 1101 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1102 1102 m = re.search(r'^(\d+)\.(\d+)', output)
1103 1103 if not m:
1104 1104 raise util.Abort(_('cannot retrieve svn tool version'))
1105 1105 return (int(m.group(1)), int(m.group(2)))
1106 1106
1107 1107 def _wcrevs(self):
1108 1108 # Get the working directory revision as well as the last
1109 1109 # commit revision so we can compare the subrepo state with
1110 1110 # both. We used to store the working directory one.
1111 1111 output, err = self._svncommand(['info', '--xml'])
1112 1112 doc = xml.dom.minidom.parseString(output)
1113 1113 entries = doc.getElementsByTagName('entry')
1114 1114 lastrev, rev = '0', '0'
1115 1115 if entries:
1116 1116 rev = str(entries[0].getAttribute('revision')) or '0'
1117 1117 commits = entries[0].getElementsByTagName('commit')
1118 1118 if commits:
1119 1119 lastrev = str(commits[0].getAttribute('revision')) or '0'
1120 1120 return (lastrev, rev)
1121 1121
1122 1122 def _wcrev(self):
1123 1123 return self._wcrevs()[0]
1124 1124
1125 1125 def _wcchanged(self):
1126 1126 """Return (changes, extchanges, missing) where changes is True
1127 1127 if the working directory was changed, extchanges is
1128 1128 True if any of these changes concern an external entry and missing
1129 1129 is True if any change is a missing entry.
1130 1130 """
1131 1131 output, err = self._svncommand(['status', '--xml'])
1132 1132 externals, changes, missing = [], [], []
1133 1133 doc = xml.dom.minidom.parseString(output)
1134 1134 for e in doc.getElementsByTagName('entry'):
1135 1135 s = e.getElementsByTagName('wc-status')
1136 1136 if not s:
1137 1137 continue
1138 1138 item = s[0].getAttribute('item')
1139 1139 props = s[0].getAttribute('props')
1140 1140 path = e.getAttribute('path')
1141 1141 if item == 'external':
1142 1142 externals.append(path)
1143 1143 elif item == 'missing':
1144 1144 missing.append(path)
1145 1145 if (item not in ('', 'normal', 'unversioned', 'external')
1146 1146 or props not in ('', 'none', 'normal')):
1147 1147 changes.append(path)
1148 1148 for path in changes:
1149 1149 for ext in externals:
1150 1150 if path == ext or path.startswith(ext + os.sep):
1151 1151 return True, True, bool(missing)
1152 1152 return bool(changes), False, bool(missing)
1153 1153
1154 1154 def dirty(self, ignoreupdate=False):
1155 1155 if not self._wcchanged()[0]:
1156 1156 if self._state[1] in self._wcrevs() or ignoreupdate:
1157 1157 return False
1158 1158 return True
1159 1159
1160 1160 def basestate(self):
1161 1161 lastrev, rev = self._wcrevs()
1162 1162 if lastrev != rev:
1163 1163 # Last committed rev is not the same as rev. We would
1164 1164 # like to take lastrev but we do not know if the subrepo
1165 1165 # URL exists at lastrev. Test it and fall back to rev if it
1166 1166 # is not there.
1167 1167 try:
1168 1168 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1169 1169 return lastrev
1170 1170 except error.Abort:
1171 1171 pass
1172 1172 return rev
1173 1173
1174 1174 @annotatesubrepoerror
1175 1175 def commit(self, text, user, date):
1176 1176 # user and date are out of our hands since svn is centralized
1177 1177 changed, extchanged, missing = self._wcchanged()
1178 1178 if not changed:
1179 1179 return self.basestate()
1180 1180 if extchanged:
1181 1181 # Do not try to commit externals
1182 1182 raise util.Abort(_('cannot commit svn externals'))
1183 1183 if missing:
1184 1184 # svn can commit with missing entries but aborting like hg
1185 1185 # seems a better approach.
1186 1186 raise util.Abort(_('cannot commit missing svn entries'))
1187 1187 commitinfo, err = self._svncommand(['commit', '-m', text])
1188 1188 self.ui.status(commitinfo)
1189 1189 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1190 1190 if not newrev:
1191 1191 if not commitinfo.strip():
1192 1192 # Sometimes, our definition of "changed" differs from
1193 1193 # svn's. For instance, svn ignores missing files
1194 1194 # when committing. If there are only missing files, no
1195 1195 # commit is made, no output and no error code.
1196 1196 raise util.Abort(_('failed to commit svn changes'))
1197 1197 raise util.Abort(commitinfo.splitlines()[-1])
1198 1198 newrev = newrev.groups()[0]
1199 1199 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1200 1200 return newrev
1201 1201
1202 1202 @annotatesubrepoerror
1203 1203 def remove(self):
1204 1204 if self.dirty():
1205 1205 self.ui.warn(_('not removing repo %s because '
1206 1206 'it has changes.\n') % self._path)
1207 1207 return
1208 1208 self.ui.note(_('removing subrepo %s\n') % self._path)
1209 1209
1210 1210 self.wvfs.rmtree(forcibly=True)
1211 1211 try:
1212 1212 self._ctx.repo().wvfs.removedirs(os.path.dirname(self._path))
1213 1213 except OSError:
1214 1214 pass
1215 1215
1216 1216 @annotatesubrepoerror
1217 1217 def get(self, state, overwrite=False):
1218 1218 if overwrite:
1219 1219 self._svncommand(['revert', '--recursive'])
1220 1220 args = ['checkout']
1221 1221 if self._svnversion >= (1, 5):
1222 1222 args.append('--force')
1223 1223 # The revision must be specified at the end of the URL to properly
1224 1224 # update to a directory which has since been deleted and recreated.
1225 1225 args.append('%s@%s' % (state[0], state[1]))
1226 1226 status, err = self._svncommand(args, failok=True)
1227 1227 _sanitize(self.ui, self.wvfs, '.svn')
1228 1228 if not re.search('Checked out revision [0-9]+.', status):
1229 1229 if ('is already a working copy for a different URL' in err
1230 1230 and (self._wcchanged()[:2] == (False, False))):
1231 1231 # obstructed but clean working copy, so just blow it away.
1232 1232 self.remove()
1233 1233 self.get(state, overwrite=False)
1234 1234 return
1235 1235 raise util.Abort((status or err).splitlines()[-1])
1236 1236 self.ui.status(status)
1237 1237
1238 1238 @annotatesubrepoerror
1239 1239 def merge(self, state):
1240 1240 old = self._state[1]
1241 1241 new = state[1]
1242 1242 wcrev = self._wcrev()
1243 1243 if new != wcrev:
1244 1244 dirty = old == wcrev or self._wcchanged()[0]
1245 1245 if _updateprompt(self.ui, self, dirty, wcrev, new):
1246 1246 self.get(state, False)
1247 1247
1248 1248 def push(self, opts):
1249 1249 # push is a no-op for SVN
1250 1250 return True
1251 1251
1252 1252 @annotatesubrepoerror
1253 1253 def files(self):
1254 1254 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1255 1255 doc = xml.dom.minidom.parseString(output)
1256 1256 paths = []
1257 1257 for e in doc.getElementsByTagName('entry'):
1258 1258 kind = str(e.getAttribute('kind'))
1259 1259 if kind != 'file':
1260 1260 continue
1261 1261 name = ''.join(c.data for c
1262 1262 in e.getElementsByTagName('name')[0].childNodes
1263 1263 if c.nodeType == c.TEXT_NODE)
1264 1264 paths.append(name.encode('utf-8'))
1265 1265 return paths
1266 1266
1267 1267 def filedata(self, name):
1268 1268 return self._svncommand(['cat'], name)[0]
1269 1269
1270 1270
1271 1271 class gitsubrepo(abstractsubrepo):
1272 1272 def __init__(self, ctx, path, state):
1273 1273 super(gitsubrepo, self).__init__(ctx, path)
1274 1274 self._state = state
1275 1275 self._abspath = ctx.repo().wjoin(path)
1276 1276 self._subparent = ctx.repo()
1277 1277 self._ensuregit()
1278 1278
1279 1279 def _ensuregit(self):
1280 1280 try:
1281 1281 self._gitexecutable = 'git'
1282 1282 out, err = self._gitnodir(['--version'])
1283 1283 except OSError, e:
1284 1284 if e.errno != 2 or os.name != 'nt':
1285 1285 raise
1286 1286 self._gitexecutable = 'git.cmd'
1287 1287 out, err = self._gitnodir(['--version'])
1288 1288 versionstatus = self._checkversion(out)
1289 1289 if versionstatus == 'unknown':
1290 1290 self.ui.warn(_('cannot retrieve git version\n'))
1291 1291 elif versionstatus == 'abort':
1292 1292 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1293 1293 elif versionstatus == 'warning':
1294 1294 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1295 1295
1296 1296 @staticmethod
1297 1297 def _gitversion(out):
1298 1298 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1299 1299 if m:
1300 1300 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1301 1301
1302 1302 m = re.search(r'^git version (\d+)\.(\d+)', out)
1303 1303 if m:
1304 1304 return (int(m.group(1)), int(m.group(2)), 0)
1305 1305
1306 1306 return -1
1307 1307
1308 1308 @staticmethod
1309 1309 def _checkversion(out):
1310 1310 '''ensure git version is new enough
1311 1311
1312 1312 >>> _checkversion = gitsubrepo._checkversion
1313 1313 >>> _checkversion('git version 1.6.0')
1314 1314 'ok'
1315 1315 >>> _checkversion('git version 1.8.5')
1316 1316 'ok'
1317 1317 >>> _checkversion('git version 1.4.0')
1318 1318 'abort'
1319 1319 >>> _checkversion('git version 1.5.0')
1320 1320 'warning'
1321 1321 >>> _checkversion('git version 1.9-rc0')
1322 1322 'ok'
1323 1323 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1324 1324 'ok'
1325 1325 >>> _checkversion('git version 1.9.0.GIT')
1326 1326 'ok'
1327 1327 >>> _checkversion('git version 12345')
1328 1328 'unknown'
1329 1329 >>> _checkversion('no')
1330 1330 'unknown'
1331 1331 '''
1332 1332 version = gitsubrepo._gitversion(out)
1333 1333 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1334 1334 # despite the docstring comment. For now, error on 1.4.0, warn on
1335 1335 # 1.5.0 but attempt to continue.
1336 1336 if version == -1:
1337 1337 return 'unknown'
1338 1338 if version < (1, 5, 0):
1339 1339 return 'abort'
1340 1340 elif version < (1, 6, 0):
1341 1341 return 'warning'
1342 1342 return 'ok'
1343 1343
1344 1344 def _gitcommand(self, commands, env=None, stream=False):
1345 1345 return self._gitdir(commands, env=env, stream=stream)[0]
1346 1346
1347 1347 def _gitdir(self, commands, env=None, stream=False):
1348 1348 return self._gitnodir(commands, env=env, stream=stream,
1349 1349 cwd=self._abspath)
1350 1350
1351 1351 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1352 1352 """Calls the git command
1353 1353
1354 1354 The method tries to call the git command. Versions prior to 1.6.0
1355 1355 are not supported and will very probably fail.
1356 1356 """
1357 1357 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1358 1358 # unless ui.quiet is set, print git's stderr,
1359 1359 # which is mostly progress and useful info
1360 1360 errpipe = None
1361 1361 if self.ui.quiet:
1362 1362 errpipe = open(os.devnull, 'w')
1363 1363 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1364 1364 cwd=cwd, env=env, close_fds=util.closefds,
1365 1365 stdout=subprocess.PIPE, stderr=errpipe)
1366 1366 if stream:
1367 1367 return p.stdout, None
1368 1368
1369 1369 retdata = p.stdout.read().strip()
1370 1370 # wait for the child to exit to avoid a race condition.
1371 1371 p.wait()
1372 1372
1373 1373 if p.returncode != 0 and p.returncode != 1:
1374 1374 # there are certain error codes that are ok
1375 1375 command = commands[0]
1376 1376 if command in ('cat-file', 'symbolic-ref'):
1377 1377 return retdata, p.returncode
1378 1378 # for all others, abort
1379 1379 raise util.Abort('git %s error %d in %s' %
1380 1380 (command, p.returncode, self._relpath))
1381 1381
1382 1382 return retdata, p.returncode
1383 1383
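# The three wrappers above form a small layering: _gitnodir() runs git and
# captures stdout, _gitdir() pins the working directory to the subrepo, and
# _gitcommand() keeps only the output. Exit code 1 is tolerated because git
# uses it for "differences found" style results, and 'cat-file'/'symbolic-ref'
# may fail without aborting so that probes such as _githavelocally() and
# _gitcurrentbranch() below can simply inspect the returned code. A minimal
# standalone sketch of that tolerant-exit-code pattern (the helper name
# rungit is illustrative, not part of Mercurial):
import subprocess

def rungit(args, cwd=None, probes=('cat-file', 'symbolic-ref')):
    # capture stdout; stderr is left alone, as in the code above when not quiet
    p = subprocess.Popen(['git'] + args, cwd=cwd, stdout=subprocess.PIPE)
    out = p.stdout.read().strip()
    p.wait()
    if p.returncode not in (0, 1) and args[0] not in probes:
        raise RuntimeError('git %s error %d' % (args[0], p.returncode))
    return out, p.returncode

# a probe: exit code 0 means the object exists locally, anything else does not
out, code = rungit(['cat-file', '-e', 'HEAD'])
print 'HEAD present:', code == 0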
1384 1384 def _gitmissing(self):
1385 1385 return not self.wvfs.exists('.git')
1386 1386
1387 1387 def _gitstate(self):
1388 1388 return self._gitcommand(['rev-parse', 'HEAD'])
1389 1389
1390 1390 def _gitcurrentbranch(self):
1391 1391 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1392 1392 if err:
1393 1393 current = None
1394 1394 return current
1395 1395
1396 1396 def _gitremote(self, remote):
1397 1397 out = self._gitcommand(['remote', 'show', '-n', remote])
1398 1398 line = out.split('\n')[1]
1399 1399 i = line.index('URL: ') + len('URL: ')
1400 1400 return line[i:]
1401 1401
1402 1402 def _githavelocally(self, revision):
1403 1403 out, code = self._gitdir(['cat-file', '-e', revision])
1404 1404 return code == 0
1405 1405
1406 1406 def _gitisancestor(self, r1, r2):
1407 1407 base = self._gitcommand(['merge-base', r1, r2])
1408 1408 return base == r1
1409 1409
1410 1410 def _gitisbare(self):
1411 1411 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1412 1412
1413 1413 def _gitupdatestat(self):
1414 1414 """This must be run before git diff-index.
1415 1415 diff-index only looks at changes to file stat;
1416 1416 this command looks at file contents and updates the stat."""
1417 1417 self._gitcommand(['update-index', '-q', '--refresh'])
1418 1418
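# The refresh matters because `git diff-index` trusts cached stat data and can
# report a file as modified after a mere touch; dirty() and merge() below
# therefore call _gitupdatestat() first and then read the exit code of
# `diff-index --quiet HEAD`. A minimal standalone sketch of the same sequence
# (the function name workdirdirty is illustrative, not part of Mercurial):
import subprocess

def workdirdirty(gitdir):
    # refresh the cached stat information so diff-index compares real contents
    subprocess.call(['git', 'update-index', '-q', '--refresh'], cwd=gitdir)
    # with --quiet, exit code 1 means "there are differences", 0 means clean
    code = subprocess.call(['git', 'diff-index', '--quiet', 'HEAD'], cwd=gitdir)
    return code == 1

print workdirdirty('.')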
1419 1419 def _gitbranchmap(self):
1420 1420 '''returns 2 things:
1421 1421 a map from git branch to revision
1422 1422 a map from revision to branches'''
1423 1423 branch2rev = {}
1424 1424 rev2branch = {}
1425 1425
1426 1426 out = self._gitcommand(['for-each-ref', '--format',
1427 1427 '%(objectname) %(refname)'])
1428 1428 for line in out.split('\n'):
1429 1429 revision, ref = line.split(' ')
1430 1430 if (not ref.startswith('refs/heads/') and
1431 1431 not ref.startswith('refs/remotes/')):
1432 1432 continue
1433 1433 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1434 1434 continue # ignore remote/HEAD redirects
1435 1435 branch2rev[ref] = revision
1436 1436 rev2branch.setdefault(revision, []).append(ref)
1437 1437 return branch2rev, rev2branch
1438 1438
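# A small worked example of what _gitbranchmap() builds, assuming
# `git for-each-ref --format '%(objectname) %(refname)'` printed the two
# illustrative lines below (the hash is made up; tags and remote HEAD
# redirects would be skipped by the filters above):
sample = ('1111111111111111111111111111111111111111 refs/heads/master\n'
          '1111111111111111111111111111111111111111 refs/remotes/origin/master')
branch2rev, rev2branch = {}, {}
for line in sample.split('\n'):
    revision, ref = line.split(' ')
    branch2rev[ref] = revision
    rev2branch.setdefault(revision, []).append(ref)
# branch2rev: {'refs/heads/master': '111...', 'refs/remotes/origin/master': '111...'}
# rev2branch: {'111...': ['refs/heads/master', 'refs/remotes/origin/master']}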
1439 1439 def _gittracking(self, branches):
1440 1440 'return map of remote branch to local tracking branch'
1441 1441 # assumes no more than one local tracking branch for each remote
1442 1442 tracking = {}
1443 1443 for b in branches:
1444 1444 if b.startswith('refs/remotes/'):
1445 1445 continue
1446 1446 bname = b.split('/', 2)[2]
1447 1447 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1448 1448 if remote:
1449 1449 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1450 1450 tracking['refs/remotes/%s/%s' %
1451 1451 (remote, ref.split('/', 2)[2])] = b
1452 1452 return tracking
1453 1453
1454 1454 def _abssource(self, source):
1455 1455 if '://' not in source:
1456 1456 # recognize the scp syntax as an absolute source
1457 1457 colon = source.find(':')
1458 1458 if colon != -1 and '/' not in source[:colon]:
1459 1459 return source
1460 1460 self._subsource = source
1461 1461 return _abssource(self)
1462 1462
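# The scp-style check above treats 'user@host:path' as already absolute
# because its first colon appears before any slash; everything else is
# resolved relative to the parent via _abssource(). A tiny standalone
# illustration of that test (the function name lookslikescp is illustrative):
def lookslikescp(source):
    # a colon before the first slash, with no URL scheme, marks scp syntax
    if '://' in source:
        return False
    colon = source.find(':')
    return colon != -1 and '/' not in source[:colon]

for s in ('git@example.com:proj.git', '../relative/path', 'https://example.com/p'):
    print s, '->', lookslikescp(s)   # True, False, False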
1463 1463 def _fetch(self, source, revision):
1464 1464 if self._gitmissing():
1465 1465 source = self._abssource(source)
1466 1466 self.ui.status(_('cloning subrepo %s from %s\n') %
1467 1467 (self._relpath, source))
1468 1468 self._gitnodir(['clone', source, self._abspath])
1469 1469 if self._githavelocally(revision):
1470 1470 return
1471 1471 self.ui.status(_('pulling subrepo %s from %s\n') %
1472 1472 (self._relpath, self._gitremote('origin')))
1473 1473 # try only origin: the originally cloned repo
1474 1474 self._gitcommand(['fetch'])
1475 1475 if not self._githavelocally(revision):
1476 1476 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1477 1477 (revision, self._relpath))
1478 1478
1479 1479 @annotatesubrepoerror
1480 1480 def dirty(self, ignoreupdate=False):
1481 1481 if self._gitmissing():
1482 1482 return self._state[1] != ''
1483 1483 if self._gitisbare():
1484 1484 return True
1485 1485 if not ignoreupdate and self._state[1] != self._gitstate():
1486 1486 # different version checked out
1487 1487 return True
1488 1488 # check for staged changes or modified files; ignore untracked files
1489 1489 self._gitupdatestat()
1490 1490 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1491 1491 return code == 1
1492 1492
1493 1493 def basestate(self):
1494 1494 return self._gitstate()
1495 1495
1496 1496 @annotatesubrepoerror
1497 1497 def get(self, state, overwrite=False):
1498 1498 source, revision, kind = state
1499 1499 if not revision:
1500 1500 self.remove()
1501 1501 return
1502 1502 self._fetch(source, revision)
1503 1503 # if the repo was set to be bare, unbare it
1504 1504 if self._gitisbare():
1505 1505 self._gitcommand(['config', 'core.bare', 'false'])
1506 1506 if self._gitstate() == revision:
1507 1507 self._gitcommand(['reset', '--hard', 'HEAD'])
1508 1508 return
1509 1509 elif self._gitstate() == revision:
1510 1510 if overwrite:
1511 1511 # first reset the index to unmark new files for commit, because
1512 1512 # reset --hard will otherwise throw away files added for commit,
1513 1513 # not just unmark them.
1514 1514 self._gitcommand(['reset', 'HEAD'])
1515 1515 self._gitcommand(['reset', '--hard', 'HEAD'])
1516 1516 return
1517 1517 branch2rev, rev2branch = self._gitbranchmap()
1518 1518
1519 1519 def checkout(args):
1520 1520 cmd = ['checkout']
1521 1521 if overwrite:
1522 1522 # first reset the index to unmark new files for commit, because
1523 1523 # the -f option will otherwise throw away files added for
1524 1524 # commit, not just unmark them.
1525 1525 self._gitcommand(['reset', 'HEAD'])
1526 1526 cmd.append('-f')
1527 1527 self._gitcommand(cmd + args)
1528 1528 _sanitize(self.ui, self.wvfs, '.git')
1529 1529
1530 1530 def rawcheckout():
1531 1531 # no branch to check out; check out the revision itself (detached HEAD)
1532 1532 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1533 1533 self._relpath)
1534 1534 self.ui.warn(_('check out a git branch if you intend '
1535 1535 'to make changes\n'))
1536 1536 checkout(['-q', revision])
1537 1537
1538 1538 if revision not in rev2branch:
1539 1539 rawcheckout()
1540 1540 return
1541 1541 branches = rev2branch[revision]
1542 1542 firstlocalbranch = None
1543 1543 for b in branches:
1544 1544 if b == 'refs/heads/master':
1545 1545 # master trumps all other branches
1546 1546 checkout(['refs/heads/master'])
1547 1547 return
1548 1548 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1549 1549 firstlocalbranch = b
1550 1550 if firstlocalbranch:
1551 1551 checkout([firstlocalbranch])
1552 1552 return
1553 1553
1554 1554 tracking = self._gittracking(branch2rev.keys())
1555 1555 # choose a remote branch already tracked if possible
1556 1556 remote = branches[0]
1557 1557 if remote not in tracking:
1558 1558 for b in branches:
1559 1559 if b in tracking:
1560 1560 remote = b
1561 1561 break
1562 1562
1563 1563 if remote not in tracking:
1564 1564 # create a new local tracking branch
1565 1565 local = remote.split('/', 3)[3]
1566 1566 checkout(['-b', local, remote])
1567 1567 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1568 1568 # When updating to a tracked remote branch,
1569 1569 # if the local tracking branch is downstream of it,
1570 1570 # a normal `git pull` would have performed a "fast-forward merge"
1571 1571 # which is equivalent to updating the local branch to the remote.
1572 1572 # Since we are only looking at branching at update, we need to
1573 1573 # detect this situation and perform this action lazily.
1574 1574 if tracking[remote] != self._gitcurrentbranch():
1575 1575 checkout([tracking[remote]])
1576 1576 self._gitcommand(['merge', '--ff', remote])
1577 1577 _sanitize(self.ui, self.wvfs, '.git')
1578 1578 else:
1579 1579 # a real merge would be required, just checkout the revision
1580 1580 rawcheckout()
1581 1581
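# The checkout logic above amounts to a preference order when deciding what to
# check out for a revision. A condensed, illustrative sketch of that order
# (the function name pickcheckout is not part of Mercurial, and the lazy
# fast-forward handling is omitted for brevity):
def pickcheckout(revision, rev2branch, tracking):
    branches = rev2branch.get(revision, [])
    if not branches:
        return ['-q', revision]            # nothing points here: detached HEAD
    if 'refs/heads/master' in branches:
        return ['refs/heads/master']       # master trumps all other branches
    for b in branches:
        if not b.startswith('refs/remotes/'):
            return [b]                     # otherwise the first local branch
    remote = branches[0]
    for b in branches:
        if b in tracking:
            remote = b                     # prefer a remote branch already tracked
            break
    if remote not in tracking:
        # create a new local tracking branch for the remote one
        return ['-b', remote.split('/', 3)[3], remote]
    return [tracking[remote]]              # reuse the existing tracking branch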
1582 1582 @annotatesubrepoerror
1583 1583 def commit(self, text, user, date):
1584 1584 if self._gitmissing():
1585 1585 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1586 1586 cmd = ['commit', '-a', '-m', text]
1587 1587 env = os.environ.copy()
1588 1588 if user:
1589 1589 cmd += ['--author', user]
1590 1590 if date:
1591 1591 # git's date parser silently ignores when seconds < 1e9
1592 1592 # convert to ISO8601
1593 1593 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1594 1594 '%Y-%m-%dT%H:%M:%S %1%2')
1595 1595 self._gitcommand(cmd, env=env)
1596 1596 # make sure the commit worked; otherwise HEAD might not exist under certain
1597 1597 # circumstances
1598 1598 return self._gitstate()
1599 1599
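# Mercurial keeps dates as (unixtime, tzoffset) pairs; git's default date
# parser ignores small raw epoch values, hence the explicit ISO-8601 string
# above. Assuming util.datestr's '%1%2' escapes expand to the UTC offset as in
# Mercurial's default date format, the epoch used throughout this test suite
# would be handed to git as '1970-01-01T00:00:00 +0000'. An equivalent,
# standard-library-only formatting for the UTC case (illustrative only):
import time
stamp, tzoffset = 0, 0   # Mercurial-style (unixtime, offset) pair, here UTC
print time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(stamp)) + ' +0000'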
1600 1600 @annotatesubrepoerror
1601 1601 def merge(self, state):
1602 1602 source, revision, kind = state
1603 1603 self._fetch(source, revision)
1604 1604 base = self._gitcommand(['merge-base', revision, self._state[1]])
1605 1605 self._gitupdatestat()
1606 1606 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1607 1607
1608 1608 def mergefunc():
1609 1609 if base == revision:
1610 1610 self.get(state) # fast forward merge
1611 1611 elif base != self._state[1]:
1612 1612 self._gitcommand(['merge', '--no-commit', revision])
1613 1613 _sanitize(self.ui, self.wvfs, '.git')
1614 1614
1615 1615 if self.dirty():
1616 1616 if self._gitstate() != revision:
1617 1617 dirty = self._gitstate() == self._state[1] or code != 0
1618 1618 if _updateprompt(self.ui, self, dirty,
1619 1619 self._state[1][:7], revision[:7]):
1620 1620 mergefunc()
1621 1621 else:
1622 1622 mergefunc()
1623 1623
1624 1624 @annotatesubrepoerror
1625 1625 def push(self, opts):
1626 1626 force = opts.get('force')
1627 1627
1628 1628 if not self._state[1]:
1629 1629 return True
1630 1630 if self._gitmissing():
1631 1631 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1632 1632 # if a branch in origin contains the revision, nothing to do
1633 1633 branch2rev, rev2branch = self._gitbranchmap()
1634 1634 if self._state[1] in rev2branch:
1635 1635 for b in rev2branch[self._state[1]]:
1636 1636 if b.startswith('refs/remotes/origin/'):
1637 1637 return True
1638 1638 for b, revision in branch2rev.iteritems():
1639 1639 if b.startswith('refs/remotes/origin/'):
1640 1640 if self._gitisancestor(self._state[1], revision):
1641 1641 return True
1642 1642 # otherwise, try to push the currently checked out branch
1643 1643 cmd = ['push']
1644 1644 if force:
1645 1645 cmd.append('--force')
1646 1646
1647 1647 current = self._gitcurrentbranch()
1648 1648 if current:
1649 1649 # determine if the current branch is even useful
1650 1650 if not self._gitisancestor(self._state[1], current):
1651 1651 self.ui.warn(_('unrelated git branch checked out '
1652 1652 'in subrepo %s\n') % self._relpath)
1653 1653 return False
1654 1654 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1655 1655 (current.split('/', 2)[2], self._relpath))
1656 1656 ret = self._gitdir(cmd + ['origin', current])
1657 1657 return ret[1] == 0
1658 1658 else:
1659 1659 self.ui.warn(_('no branch checked out in subrepo %s\n'
1660 1660 'cannot push revision %s\n') %
1661 1661 (self._relpath, self._state[1]))
1662 1662 return False
1663 1663
1664 1664 @annotatesubrepoerror
1665 1665 def add(self, ui, match, prefix, explicitonly, **opts):
1666 1666 if self._gitmissing():
1667 1667 return []
1668 1668
1669 1669 (modified, added, removed,
1670 1670 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1671 1671 clean=True)
1672 1672
1673 1673 tracked = set()
1674 1674 # dirstate states 'a', 'm', 'n' warn; files in state 'r' are added again
1675 1675 for l in (modified, added, deleted, clean):
1676 1676 tracked.update(l)
1677 1677
1678 1678 # Unknown files not of interest will be rejected by the matcher
1679 1679 files = unknown
1680 1680 files.extend(match.files())
1681 1681
1682 1682 rejected = []
1683 1683
1684 1684 files = [f for f in sorted(set(files)) if match(f)]
1685 1685 for f in files:
1686 1686 exact = match.exact(f)
1687 1687 command = ["add"]
1688 1688 if exact:
1689 1689 command.append("-f") #should be added, even if ignored
1690 1690 if ui.verbose or not exact:
1691 1691 ui.status(_('adding %s\n') % match.rel(f))
1692 1692
1693 1693 if f in tracked: # hg prints 'adding' even if already tracked
1694 1694 if exact:
1695 1695 rejected.append(f)
1696 1696 continue
1697 1697 if not opts.get('dry_run'):
1698 1698 self._gitcommand(command + [f])
1699 1699
1700 1700 for f in rejected:
1701 1701 ui.warn(_("%s already tracked!\n") % match.abs(f))
1702 1702
1703 1703 return rejected
1704 1704
1705 1705 @annotatesubrepoerror
1706 1706 def remove(self):
1707 1707 if self._gitmissing():
1708 1708 return
1709 1709 if self.dirty():
1710 1710 self.ui.warn(_('not removing repo %s because '
1711 1711 'it has changes.\n') % self._relpath)
1712 1712 return
1713 1713 # we can't fully delete the repository as it may contain
1714 1714 # local-only history
1715 1715 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1716 1716 self._gitcommand(['config', 'core.bare', 'true'])
1717 1717 for f, kind in self.wvfs.readdir():
1718 1718 if f == '.git':
1719 1719 continue
1720 1720 if kind == stat.S_IFDIR:
1721 1721 self.wvfs.rmtree(f)
1722 1722 else:
1723 1723 self.wvfs.unlink(f)
1724 1724
1725 1725 def archive(self, archiver, prefix, match=None):
1726 1726 total = 0
1727 1727 source, revision = self._state
1728 1728 if not revision:
1729 1729 return total
1730 1730 self._fetch(source, revision)
1731 1731
1732 1732 # Use git's native archive command and parse its output.
1733 1733 # This should be much faster than manually traversing the trees
1734 1734 # and objects with many subprocess calls.
1735 1735 tarstream = self._gitcommand(['archive', revision], stream=True)
1736 1736 tar = tarfile.open(fileobj=tarstream, mode='r|')
1737 1737 relpath = subrelpath(self)
1738 1738 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1739 1739 for i, info in enumerate(tar):
1740 1740 if info.isdir():
1741 1741 continue
1742 1742 if match and not match(info.name):
1743 1743 continue
1744 1744 if info.issym():
1745 1745 data = info.linkname
1746 1746 else:
1747 1747 data = tar.extractfile(info).read()
1748 1748 archiver.addfile(prefix + self._path + '/' + info.name,
1749 1749 info.mode, info.issym(), data)
1750 1750 total += 1
1751 1751 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1752 1752 unit=_('files'))
1753 1753 self.ui.progress(_('archiving (%s)') % relpath, None)
1754 1754 return total
1755 1755
1756 1756
1757 1757 @annotatesubrepoerror
1758 1758 def cat(self, match, prefix, **opts):
1759 1759 rev = self._state[1]
1760 1760 if match.anypats():
1761 1761 return 1 #No support for include/exclude yet
1762 1762
1763 1763 if not match.files():
1764 1764 return 1
1765 1765
1766 1766 for f in match.files():
1767 1767 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1768 1768 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1769 1769 self._ctx.node(),
1770 1770 pathname=self.wvfs.reljoin(prefix, f))
1771 1771 fp.write(output)
1772 1772 fp.close()
1773 1773 return 0
1774 1774
1775 1775
1776 1776 @annotatesubrepoerror
1777 1777 def status(self, rev2, **opts):
1778 1778 rev1 = self._state[1]
1779 1779 if self._gitmissing() or not rev1:
1780 1780 # if the repo is missing, return no results
1781 1781 return scmutil.status([], [], [], [], [], [], [])
1782 1782 modified, added, removed = [], [], []
1783 1783 self._gitupdatestat()
1784 1784 if rev2:
1785 1785 command = ['diff-tree', '-r', rev1, rev2]
1786 1786 else:
1787 1787 command = ['diff-index', rev1]
1788 1788 out = self._gitcommand(command)
1789 1789 for line in out.split('\n'):
1790 1790 tab = line.find('\t')
1791 1791 if tab == -1:
1792 1792 continue
1793 1793 status, f = line[tab - 1], line[tab + 1:]
1794 1794 if status == 'M':
1795 1795 modified.append(f)
1796 1796 elif status == 'A':
1797 1797 added.append(f)
1798 1798 elif status == 'D':
1799 1799 removed.append(f)
1800 1800
1801 1801 deleted, unknown, ignored, clean = [], [], [], []
1802 1802
1803 1803 command = ['status', '--porcelain', '-z']
1804 1804 if opts.get('unknown'):
1805 1805 command += ['--untracked-files=all']
1806 1806 if opts.get('ignored'):
1807 1807 command += ['--ignored']
1808 1808 out = self._gitcommand(command)
1809 1809
1810 1810 changedfiles = set()
1811 1811 changedfiles.update(modified)
1812 1812 changedfiles.update(added)
1813 1813 changedfiles.update(removed)
1814 1814 for line in out.split('\0'):
1815 1815 if not line:
1816 1816 continue
1817 1817 st = line[0:2]
1818 1818 #moves and copies show 2 files on one line
1819 1819 if line.find('\0') >= 0:
1820 1820 filename1, filename2 = line[3:].split('\0')
1821 1821 else:
1822 1822 filename1 = line[3:]
1823 1823 filename2 = None
1824 1824
1825 1825 changedfiles.add(filename1)
1826 1826 if filename2:
1827 1827 changedfiles.add(filename2)
1828 1828
1829 1829 if st == '??':
1830 1830 unknown.append(filename1)
1831 1831 elif st == '!!':
1832 1832 ignored.append(filename1)
1833 1833
1834 1834 if opts.get('clean'):
1835 1835 out = self._gitcommand(['ls-files'])
1836 1836 for f in out.split('\n'):
1837 1837 if f not in changedfiles:
1838 1838 clean.append(f)
1839 1839
1840 1840 return scmutil.status(modified, added, removed, deleted,
1841 1841 unknown, ignored, clean)
1842 1842
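# A small worked example of the porcelain parsing above for the entries it
# relies on (untracked '??' and ignored '!!'); the sample -z output string is
# illustrative:
out = '?? foo/bar/abc\0!! build/output.o\0'
unknown, ignored = [], []
for line in out.split('\0'):
    if not line:
        continue
    st, filename1 = line[0:2], line[3:]
    if st == '??':
        unknown.append(filename1)
    elif st == '!!':
        ignored.append(filename1)
print unknown, ignored   # ['foo/bar/abc'] ['build/output.o']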
1843 1843 @annotatesubrepoerror
1844 1844 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1845 1845 node1 = self._state[1]
1846 1846 cmd = ['diff']
1847 1847 if opts['stat']:
1848 1848 cmd.append('--stat')
1849 1849 else:
1850 1850 # for Git, this also implies '-p'
1851 1851 cmd.append('-U%d' % diffopts.context)
1852 1852
1853 1853 gitprefix = self.wvfs.reljoin(prefix, self._path)
1854 1854
1855 1855 if diffopts.noprefix:
1856 1856 cmd.extend(['--src-prefix=%s/' % gitprefix,
1857 1857 '--dst-prefix=%s/' % gitprefix])
1858 1858 else:
1859 1859 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1860 1860 '--dst-prefix=b/%s/' % gitprefix])
1861 1861
1862 1862 if diffopts.ignorews:
1863 1863 cmd.append('--ignore-all-space')
1864 1864 if diffopts.ignorewsamount:
1865 1865 cmd.append('--ignore-space-change')
1866 1866 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1867 1867 and diffopts.ignoreblanklines:
1868 1868 cmd.append('--ignore-blank-lines')
1869 1869
1870 1870 cmd.append(node1)
1871 1871 if node2:
1872 1872 cmd.append(node2)
1873 1873
1874 1874 output = ""
1875 1875 if match.always():
1876 1876 output += self._gitcommand(cmd) + '\n'
1877 1877 else:
1878 1878 st = self.status(node2)[:3]
1879 1879 files = [f for sublist in st for f in sublist]
1880 1880 for f in files:
1881 1881 if match(f):
1882 1882 output += self._gitcommand(cmd + ['--', f]) + '\n'
1883 1883
1884 1884 if output.strip():
1885 1885 ui.write(output)
1886 1886
1887 1887 @annotatesubrepoerror
1888 1888 def revert(self, substate, *pats, **opts):
1889 1889 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1890 1890 if not opts.get('no_backup'):
1891 1891 status = self.status(None)
1892 1892 names = status.modified
1893 1893 for name in names:
1894 1894 bakname = "%s.orig" % name
1895 1895 self.ui.note(_('saving current version of %s as %s\n') %
1896 1896 (name, bakname))
1897 1897 self.wvfs.rename(name, bakname)
1898 1898
1899 1899 if not opts.get('dry_run'):
1900 1900 self.get(substate, overwrite=True)
1901 1901 return []
1902 1902
1903 1903 def shortid(self, revid):
1904 1904 return revid[:7]
1905 1905
1906 1906 types = {
1907 1907 'hg': hgsubrepo,
1908 1908 'svn': svnsubrepo,
1909 1909 'git': gitsubrepo,
1910 1910 }
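# The mapping above is the dispatch table for the supported subrepo kinds; the
# kind string is the third field of each subrepo state tuple. A hedged sketch
# of how such a registry is typically consumed (the factory shown here is
# illustrative; the real lookup lives earlier in subrepo.py and is not part of
# this hunk):
def makesubrepo(ctx, path, state):
    # state is (source, revision, kind), e.g. ('../sub2', '53dd3430bcaf...', 'hg')
    kind = state[2]
    if kind not in types:
        raise util.Abort(_('unknown subrepo type %s') % kind)
    return types[kind](ctx, path, state)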
@@ -1,598 +1,694 b''
1 1 Preparing the subrepository 'sub2'
2 2
3 3 $ hg init sub2
4 4 $ echo sub2 > sub2/sub2
5 5 $ hg add -R sub2
6 6 adding sub2/sub2 (glob)
7 7 $ hg commit -R sub2 -m "sub2 import"
8 8
9 9 Preparing the 'sub1' repo which depends on the subrepo 'sub2'
10 10
11 11 $ hg init sub1
12 12 $ echo sub1 > sub1/sub1
13 13 $ echo "sub2 = ../sub2" > sub1/.hgsub
14 14 $ hg clone sub2 sub1/sub2
15 15 updating to branch default
16 16 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
17 17 $ hg add -R sub1
18 18 adding sub1/.hgsub (glob)
19 19 adding sub1/sub1 (glob)
20 20 $ hg commit -R sub1 -m "sub1 import"
21 21
22 22 Preparing the 'main' repo which depends on the subrepo 'sub1'
23 23
24 24 $ hg init main
25 25 $ echo main > main/main
26 26 $ echo "sub1 = ../sub1" > main/.hgsub
27 27 $ hg clone sub1 main/sub1
28 28 updating to branch default
29 29 cloning subrepo sub2 from $TESTTMP/sub2
30 30 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 31 $ hg add -R main
32 32 adding main/.hgsub (glob)
33 33 adding main/main (glob)
34 34 $ hg commit -R main -m "main import"
35 35
36 36 Cleaning both repositories, just as a clone -U
37 37
38 38 $ hg up -C -R sub2 null
39 39 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
40 40 $ hg up -C -R sub1 null
41 41 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
42 42 $ hg up -C -R main null
43 43 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
44 44 $ rm -rf main/sub1
45 45 $ rm -rf sub1/sub2
46 46
47 47 Clone main
48 48
49 49 $ hg --config extensions.largefiles= clone main cloned
50 50 updating to branch default
51 51 cloning subrepo sub1 from $TESTTMP/sub1
52 52 cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob)
53 53 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
54 54
55 55 Largefiles is NOT enabled in the clone if the source repo doesn't require it
56 56 $ cat cloned/.hg/hgrc
57 57 # example repository config (see "hg help config" for more info)
58 58 [paths]
59 59 default = $TESTTMP/main (glob)
60 60
61 61 # path aliases to other clones of this repo in URLs or filesystem paths
62 62 # (see "hg help config.paths" for more info)
63 63 #
64 64 # default-push = ssh://jdoe@example.net/hg/jdoes-fork
65 65 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
66 66 # my-clone = /home/jdoe/jdoes-clone
67 67
68 68 [ui]
69 69 # name and email (local to this repository, optional), e.g.
70 70 # username = Jane Doe <jdoe@example.com>
71 71
72 72 Checking cloned repo ids
73 73
74 74 $ printf "cloned " ; hg id -R cloned
75 75 cloned 7f491f53a367 tip
76 76 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
77 77 cloned/sub1 fc3b4ce2696f tip
78 78 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
79 79 cloned/sub1/sub2 c57a0840e3ba tip
80 80
81 81 debugsub output for main and sub1
82 82
83 83 $ hg debugsub -R cloned
84 84 path sub1
85 85 source ../sub1
86 86 revision fc3b4ce2696f7741438c79207583768f2ce6b0dd
87 87 $ hg debugsub -R cloned/sub1
88 88 path sub2
89 89 source ../sub2
90 90 revision c57a0840e3badd667ef3c3ef65471609acb2ba3c
91 91
92 92 Modifying deeply nested 'sub2'
93 93
94 94 $ echo modified > cloned/sub1/sub2/sub2
95 95 $ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned
96 96 committing subrepository sub1
97 97 committing subrepository sub1/sub2 (glob)
98 98
99 99 Checking modified node ids
100 100
101 101 $ printf "cloned " ; hg id -R cloned
102 102 cloned ffe6649062fe tip
103 103 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
104 104 cloned/sub1 2ecb03bf44a9 tip
105 105 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
106 106 cloned/sub1/sub2 53dd3430bcaf tip
107 107
108 108 debugsub output for main and sub1
109 109
110 110 $ hg debugsub -R cloned
111 111 path sub1
112 112 source ../sub1
113 113 revision 2ecb03bf44a94e749e8669481dd9069526ce7cb9
114 114 $ hg debugsub -R cloned/sub1
115 115 path sub2
116 116 source ../sub2
117 117 revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487
118 118
119 119 Check that deep archiving works
120 120
121 121 $ cd cloned
122 122 $ echo 'test' > sub1/sub2/test.txt
123 123 $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
124 124 $ mkdir sub1/sub2/folder
125 125 $ echo 'subfolder' > sub1/sub2/folder/test.txt
126 126 $ hg ci -ASm "add test.txt"
127 127 adding sub1/sub2/folder/test.txt
128 128 committing subrepository sub1
129 129 committing subrepository sub1/sub2 (glob)
130 130
131 131 .. but first take a detour through some deep removal testing
132 132
133 133 $ hg remove -S -I 're:.*.txt' .
134 134 removing sub1/sub2/folder/test.txt (glob)
135 135 removing sub1/sub2/test.txt (glob)
136 136 $ hg status -S
137 137 R sub1/sub2/folder/test.txt
138 138 R sub1/sub2/test.txt
139 139 $ hg update -Cq
140 140 $ hg remove -I 're:.*.txt' sub1
141 141 $ hg status -S
142 142 $ hg remove sub1/sub2/folder/test.txt
143 143 $ hg remove sub1/.hgsubstate
144 144 $ mv sub1/.hgsub sub1/x.hgsub
145 145 $ hg status -S
146 146 warning: subrepo spec file 'sub1/.hgsub' not found (glob)
147 147 R sub1/.hgsubstate
148 148 R sub1/sub2/folder/test.txt
149 149 ! sub1/.hgsub
150 150 ? sub1/x.hgsub
151 151 $ mv sub1/x.hgsub sub1/.hgsub
152 152 $ hg update -Cq
153 153 $ touch sub1/foo
154 154 $ hg forget sub1/sub2/folder/test.txt
155 155 $ rm sub1/sub2/test.txt
156 156
157 157 Test relative path printing + subrepos
158 158 $ mkdir -p foo/bar
159 159 $ cd foo
160 160 $ touch bar/abc
161 161 $ hg addremove -S ..
162 162 adding ../sub1/sub2/folder/test.txt (glob)
163 163 removing ../sub1/sub2/test.txt (glob)
164 164 adding ../sub1/foo (glob)
165 165 adding bar/abc (glob)
166 166 $ cd ..
167 167 $ hg status -S
168 168 A foo/bar/abc
169 169 A sub1/foo
170 170 R sub1/sub2/test.txt
171
172 Archive wdir() with subrepos
173 $ hg rm main
174 $ hg archive -S -r 'wdir()' ../wdir
175 $ diff -r . ../wdir | grep -v '\.hg$'
176 Only in ../wdir: .hg_archival.txt
177
178 $ find ../wdir -type f | sort
179 ../wdir/.hg_archival.txt
180 ../wdir/.hgsub
181 ../wdir/.hgsubstate
182 ../wdir/foo/bar/abc
183 ../wdir/sub1/.hgsub
184 ../wdir/sub1/.hgsubstate
185 ../wdir/sub1/foo
186 ../wdir/sub1/sub1
187 ../wdir/sub1/sub2/folder/test.txt
188 ../wdir/sub1/sub2/sub2
189
190 Attempting to archive 'wdir()' with a missing file is handled gracefully
191 $ rm sub1/sub1
192 $ rm -r ../wdir
193 $ hg archive -v -S -r 'wdir()' ../wdir
194 $ find ../wdir -type f | sort
195 ../wdir/.hg_archival.txt
196 ../wdir/.hgsub
197 ../wdir/.hgsubstate
198 ../wdir/foo/bar/abc
199 ../wdir/sub1/.hgsub
200 ../wdir/sub1/.hgsubstate
201 ../wdir/sub1/foo
202 ../wdir/sub1/sub2/folder/test.txt
203 ../wdir/sub1/sub2/sub2
204
205 Continue relative path printing + subrepos
171 206 $ hg update -Cq
172 207 $ touch sub1/sub2/folder/bar
173 208 $ hg addremove sub1/sub2
174 209 adding sub1/sub2/folder/bar (glob)
175 210 $ hg status -S
176 211 A sub1/sub2/folder/bar
177 212 ? foo/bar/abc
178 213 ? sub1/foo
179 214 $ hg update -Cq
180 215 $ hg addremove sub1
181 216 adding sub1/sub2/folder/bar (glob)
182 217 adding sub1/foo (glob)
183 218 $ hg update -Cq
184 219 $ rm sub1/sub2/folder/test.txt
185 220 $ rm sub1/sub2/test.txt
186 221 $ hg ci -ASm "remove test.txt"
187 222 adding sub1/sub2/folder/bar
188 223 removing sub1/sub2/folder/test.txt
189 224 removing sub1/sub2/test.txt
190 225 adding sub1/foo
191 226 adding foo/bar/abc
192 227 committing subrepository sub1
193 228 committing subrepository sub1/sub2 (glob)
194 229
195 230 $ hg forget sub1/sub2/sub2
196 231 $ echo x > sub1/sub2/x.txt
197 232 $ hg add sub1/sub2/x.txt
198 233
199 234 Files sees uncommitted adds and removes in subrepos
200 235 $ hg files -S
201 236 .hgsub
202 237 .hgsubstate
203 238 foo/bar/abc (glob)
204 239 main
205 240 sub1/.hgsub (glob)
206 241 sub1/.hgsubstate (glob)
207 242 sub1/foo (glob)
208 243 sub1/sub1 (glob)
209 244 sub1/sub2/folder/bar (glob)
210 245 sub1/sub2/x.txt (glob)
211 246
212 247 $ hg files -S "set:eol('dos') or eol('unix') or size('<= 0')"
213 248 .hgsub
214 249 .hgsubstate
215 250 foo/bar/abc (glob)
216 251 main
217 252 sub1/.hgsub (glob)
218 253 sub1/.hgsubstate (glob)
219 254 sub1/foo (glob)
220 255 sub1/sub1 (glob)
221 256 sub1/sub2/folder/bar (glob)
222 257 sub1/sub2/x.txt (glob)
223 258
224 259 $ hg files -r '.^' -S "set:eol('dos') or eol('unix')"
225 260 .hgsub
226 261 .hgsubstate
227 262 main
228 263 sub1/.hgsub (glob)
229 264 sub1/.hgsubstate (glob)
230 265 sub1/sub1 (glob)
231 266 sub1/sub2/folder/test.txt (glob)
232 267 sub1/sub2/sub2 (glob)
233 268 sub1/sub2/test.txt (glob)
234 269
235 270 $ hg files sub1
236 271 sub1/.hgsub (glob)
237 272 sub1/.hgsubstate (glob)
238 273 sub1/foo (glob)
239 274 sub1/sub1 (glob)
240 275 sub1/sub2/folder/bar (glob)
241 276 sub1/sub2/x.txt (glob)
242 277
243 278 $ hg files sub1/sub2
244 279 sub1/sub2/folder/bar (glob)
245 280 sub1/sub2/x.txt (glob)
246 281
247 282 $ hg files -S -r '.^' sub1/sub2/folder
248 283 sub1/sub2/folder/test.txt (glob)
249 284
250 285 $ hg files -S -r '.^' sub1/sub2/missing
251 286 sub1/sub2/missing: no such file in rev 78026e779ea6 (glob)
252 287 [1]
253 288
254 289 $ hg files -r '.^' sub1/
255 290 sub1/.hgsub (glob)
256 291 sub1/.hgsubstate (glob)
257 292 sub1/sub1 (glob)
258 293 sub1/sub2/folder/test.txt (glob)
259 294 sub1/sub2/sub2 (glob)
260 295 sub1/sub2/test.txt (glob)
261 296
262 297 $ hg files -r '.^' sub1/sub2
263 298 sub1/sub2/folder/test.txt (glob)
264 299 sub1/sub2/sub2 (glob)
265 300 sub1/sub2/test.txt (glob)
266 301
267 302 $ hg rollback -q
268 303 $ hg up -Cq
269 304
270 305 $ hg --config extensions.largefiles=! archive -S ../archive_all
271 306 $ find ../archive_all | sort
272 307 ../archive_all
273 308 ../archive_all/.hg_archival.txt
274 309 ../archive_all/.hgsub
275 310 ../archive_all/.hgsubstate
276 311 ../archive_all/main
277 312 ../archive_all/sub1
278 313 ../archive_all/sub1/.hgsub
279 314 ../archive_all/sub1/.hgsubstate
280 315 ../archive_all/sub1/sub1
281 316 ../archive_all/sub1/sub2
282 317 ../archive_all/sub1/sub2/folder
283 318 ../archive_all/sub1/sub2/folder/test.txt
284 319 ../archive_all/sub1/sub2/sub2
285 320 ../archive_all/sub1/sub2/test.txt
286 321
287 322 Check that archive -X works in deep subrepos
288 323
289 324 $ hg --config extensions.largefiles=! archive -S -X '**test*' ../archive_exclude
290 325 $ find ../archive_exclude | sort
291 326 ../archive_exclude
292 327 ../archive_exclude/.hg_archival.txt
293 328 ../archive_exclude/.hgsub
294 329 ../archive_exclude/.hgsubstate
295 330 ../archive_exclude/main
296 331 ../archive_exclude/sub1
297 332 ../archive_exclude/sub1/.hgsub
298 333 ../archive_exclude/sub1/.hgsubstate
299 334 ../archive_exclude/sub1/sub1
300 335 ../archive_exclude/sub1/sub2
301 336 ../archive_exclude/sub1/sub2/sub2
302 337
303 338 $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include
304 339 $ find ../archive_include | sort
305 340 ../archive_include
306 341 ../archive_include/sub1
307 342 ../archive_include/sub1/sub2
308 343 ../archive_include/sub1/sub2/folder
309 344 ../archive_include/sub1/sub2/folder/test.txt
310 345 ../archive_include/sub1/sub2/test.txt
311 346
312 347 Check that deep archive works with largefiles (which overrides hgsubrepo impl)
314 349 This also tests the repo.ui regression in 43fb170a23bd, and that lf
315 350 subrepos are archived properly.
315 350 Note that add --large through a subrepo currently adds the file as a normal file
316 351
317 352 $ echo "large" > sub1/sub2/large.bin
318 353 $ hg --config extensions.largefiles= add --large -R sub1/sub2 sub1/sub2/large.bin
319 354 $ echo "large" > large.bin
320 355 $ hg --config extensions.largefiles= add --large large.bin
321 356 $ hg --config extensions.largefiles= ci -S -m "add large files"
322 357 committing subrepository sub1
323 358 committing subrepository sub1/sub2 (glob)
324 359
325 360 $ hg --config extensions.largefiles= archive -S ../archive_lf
326 361 $ find ../archive_lf | sort
327 362 ../archive_lf
328 363 ../archive_lf/.hg_archival.txt
329 364 ../archive_lf/.hgsub
330 365 ../archive_lf/.hgsubstate
331 366 ../archive_lf/large.bin
332 367 ../archive_lf/main
333 368 ../archive_lf/sub1
334 369 ../archive_lf/sub1/.hgsub
335 370 ../archive_lf/sub1/.hgsubstate
336 371 ../archive_lf/sub1/sub1
337 372 ../archive_lf/sub1/sub2
338 373 ../archive_lf/sub1/sub2/folder
339 374 ../archive_lf/sub1/sub2/folder/test.txt
340 375 ../archive_lf/sub1/sub2/large.bin
341 376 ../archive_lf/sub1/sub2/sub2
342 377 ../archive_lf/sub1/sub2/test.txt
343 378 $ rm -rf ../archive_lf
344 379
345 380 Exclude large files from main and sub-sub repo
346 381
347 382 $ hg --config extensions.largefiles= archive -S -X '**.bin' ../archive_lf
348 383 $ find ../archive_lf | sort
349 384 ../archive_lf
350 385 ../archive_lf/.hg_archival.txt
351 386 ../archive_lf/.hgsub
352 387 ../archive_lf/.hgsubstate
353 388 ../archive_lf/main
354 389 ../archive_lf/sub1
355 390 ../archive_lf/sub1/.hgsub
356 391 ../archive_lf/sub1/.hgsubstate
357 392 ../archive_lf/sub1/sub1
358 393 ../archive_lf/sub1/sub2
359 394 ../archive_lf/sub1/sub2/folder
360 395 ../archive_lf/sub1/sub2/folder/test.txt
361 396 ../archive_lf/sub1/sub2/sub2
362 397 ../archive_lf/sub1/sub2/test.txt
363 398 $ rm -rf ../archive_lf
364 399
365 400 Exclude normal files from main and sub-sub repo
366 401
367 402 $ hg --config extensions.largefiles= archive -S -X '**.txt' -p '.' ../archive_lf.tgz
368 403 $ tar -tzf ../archive_lf.tgz | sort
369 404 .hgsub
370 405 .hgsubstate
371 406 large.bin
372 407 main
373 408 sub1/.hgsub
374 409 sub1/.hgsubstate
375 410 sub1/sub1
376 411 sub1/sub2/large.bin
377 412 sub1/sub2/sub2
378 413
379 414 Include normal files from within a largefiles subrepo
380 415
381 416 $ hg --config extensions.largefiles= archive -S -I '**.txt' ../archive_lf
382 417 $ find ../archive_lf | sort
383 418 ../archive_lf
384 419 ../archive_lf/.hg_archival.txt
385 420 ../archive_lf/sub1
386 421 ../archive_lf/sub1/sub2
387 422 ../archive_lf/sub1/sub2/folder
388 423 ../archive_lf/sub1/sub2/folder/test.txt
389 424 ../archive_lf/sub1/sub2/test.txt
390 425 $ rm -rf ../archive_lf
391 426
392 427 Include large files from within a largefiles subrepo
393 428
394 429 $ hg --config extensions.largefiles= archive -S -I '**.bin' ../archive_lf
395 430 $ find ../archive_lf | sort
396 431 ../archive_lf
397 432 ../archive_lf/large.bin
398 433 ../archive_lf/sub1
399 434 ../archive_lf/sub1/sub2
400 435 ../archive_lf/sub1/sub2/large.bin
401 436 $ rm -rf ../archive_lf
402 437
403 438 Find an exact largefile match in a largefiles subrepo
404 439
405 440 $ hg --config extensions.largefiles= archive -S -I 'sub1/sub2/large.bin' ../archive_lf
406 441 $ find ../archive_lf | sort
407 442 ../archive_lf
408 443 ../archive_lf/sub1
409 444 ../archive_lf/sub1/sub2
410 445 ../archive_lf/sub1/sub2/large.bin
411 446 $ rm -rf ../archive_lf
412 447
413 448 The local repo enables largefiles if a largefiles repo is cloned
414 449 $ hg showconfig extensions
415 450 abort: repository requires features unknown to this Mercurial: largefiles!
416 451 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
417 452 [255]
418 453 $ hg --config extensions.largefiles= clone -qU . ../lfclone
419 454 $ cat ../lfclone/.hg/hgrc
420 455 # example repository config (see "hg help config" for more info)
421 456 [paths]
422 457 default = $TESTTMP/cloned (glob)
423 458
424 459 # path aliases to other clones of this repo in URLs or filesystem paths
425 460 # (see "hg help config.paths" for more info)
426 461 #
427 462 # default-push = ssh://jdoe@example.net/hg/jdoes-fork
428 463 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
429 464 # my-clone = /home/jdoe/jdoes-clone
430 465
431 466 [ui]
432 467 # name and email (local to this repository, optional), e.g.
433 468 # username = Jane Doe <jdoe@example.com>
434 469
435 470 [extensions]
436 471 largefiles=
437 472
438 473 Find an exact match to a standin (should archive nothing)
439 474 $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
440 475 $ find ../archive_lf 2> /dev/null | sort
441 476
442 477 $ cat >> $HGRCPATH <<EOF
443 478 > [extensions]
444 479 > largefiles=
445 480 > [largefiles]
446 481 > patterns=glob:**.dat
447 482 > EOF
448 483
449 484 Test forget through a deep subrepo with the largefiles extension, both a
450 485 largefile and a normal file. Then a largefile that hasn't been committed yet.
451 486 $ touch sub1/sub2/untracked.txt
452 487 $ touch sub1/sub2/large.dat
453 488 $ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt
454 489 not removing sub1/sub2/untracked.txt: file is already untracked (glob)
455 490 [1]
456 491 $ hg add --large --dry-run -v sub1/sub2/untracked.txt
457 492 adding sub1/sub2/untracked.txt as a largefile (glob)
458 493 $ hg add --large -v sub1/sub2/untracked.txt
459 494 adding sub1/sub2/untracked.txt as a largefile (glob)
460 495 $ hg add --normal -v sub1/sub2/large.dat
461 496 adding sub1/sub2/large.dat (glob)
462 497 $ hg forget -v sub1/sub2/untracked.txt
463 498 removing sub1/sub2/untracked.txt (glob)
464 499 $ hg status -S
465 500 A sub1/sub2/large.dat
466 501 R sub1/sub2/large.bin
467 502 R sub1/sub2/test.txt
468 503 ? foo/bar/abc
469 504 ? sub1/sub2/untracked.txt
470 505 ? sub1/sub2/x.txt
471 506 $ hg add sub1/sub2
507
508 $ hg archive -S -r 'wdir()' ../wdir2
509 $ diff -r . ../wdir2 | grep -v '\.hg$'
510 Only in ../wdir2: .hg_archival.txt
511 Only in .: .hglf
512 Only in .: foo
513 Only in ./sub1/sub2: large.bin
514 Only in ./sub1/sub2: test.txt
515 Only in ./sub1/sub2: untracked.txt
516 Only in ./sub1/sub2: x.txt
517 $ find ../wdir2 -type f | sort
518 ../wdir2/.hg_archival.txt
519 ../wdir2/.hgsub
520 ../wdir2/.hgsubstate
521 ../wdir2/large.bin
522 ../wdir2/main
523 ../wdir2/sub1/.hgsub
524 ../wdir2/sub1/.hgsubstate
525 ../wdir2/sub1/sub1
526 ../wdir2/sub1/sub2/folder/test.txt
527 ../wdir2/sub1/sub2/large.dat
528 ../wdir2/sub1/sub2/sub2
529 $ hg status -S -mac -n | sort
530 .hgsub
531 .hgsubstate
532 large.bin
533 main
534 sub1/.hgsub
535 sub1/.hgsubstate
536 sub1/sub1
537 sub1/sub2/folder/test.txt
538 sub1/sub2/large.dat
539 sub1/sub2/sub2
540
472 541 $ hg ci -Sqm 'forget testing'
473 542
543 Test 'wdir()' modified file archiving with largefiles
544 $ echo 'mod' > main
545 $ echo 'mod' > large.bin
546 $ echo 'mod' > sub1/sub2/large.dat
547 $ hg archive -S -r 'wdir()' ../wdir3
548 $ diff -r . ../wdir3 | grep -v '\.hg$'
549 Only in ../wdir3: .hg_archival.txt
550 Only in .: .hglf
551 Only in .: foo
552 Only in ./sub1/sub2: large.bin
553 Only in ./sub1/sub2: test.txt
554 Only in ./sub1/sub2: untracked.txt
555 Only in ./sub1/sub2: x.txt
556 $ find ../wdir3 -type f | sort
557 ../wdir3/.hg_archival.txt
558 ../wdir3/.hgsub
559 ../wdir3/.hgsubstate
560 ../wdir3/large.bin
561 ../wdir3/main
562 ../wdir3/sub1/.hgsub
563 ../wdir3/sub1/.hgsubstate
564 ../wdir3/sub1/sub1
565 ../wdir3/sub1/sub2/folder/test.txt
566 ../wdir3/sub1/sub2/large.dat
567 ../wdir3/sub1/sub2/sub2
568 $ hg up -Cq
569
474 570 Test issue4330: commit a directory where only normal files have changed
475 571 $ touch foo/bar/large.dat
476 572 $ hg add --large foo/bar/large.dat
477 573 $ hg ci -m 'add foo/bar/large.dat'
478 574 $ touch a.txt
479 575 $ touch a.dat
480 576 $ hg add -v foo/bar/abc a.txt a.dat
481 577 adding a.dat as a largefile
482 578 adding a.txt
483 579 adding foo/bar/abc (glob)
484 580 $ hg ci -m 'dir commit with only normal file deltas' foo/bar
485 581 $ hg status
486 582 A a.dat
487 583 A a.txt
488 584
489 585 Test a directory commit with a changed largefile and a changed normal file
490 586 $ echo changed > foo/bar/large.dat
491 587 $ echo changed > foo/bar/abc
492 588 $ hg ci -m 'dir commit with normal and lf file deltas' foo
493 589 $ hg status
494 590 A a.dat
495 591 A a.txt
496 592
497 593 $ hg ci -m "add a.*"
498 594 $ hg mv a.dat b.dat
499 595 $ hg mv foo/bar/abc foo/bar/def
500 596 $ hg status -C
501 597 A b.dat
502 598 a.dat
503 599 A foo/bar/def
504 600 foo/bar/abc
505 601 R a.dat
506 602 R foo/bar/abc
507 603
508 604 $ hg ci -m "move large and normal"
509 605 $ hg status -C --rev '.^' --rev .
510 606 A b.dat
511 607 a.dat
512 608 A foo/bar/def
513 609 foo/bar/abc
514 610 R a.dat
515 611 R foo/bar/abc
516 612
517 613
518 614 $ echo foo > main
519 615 $ hg ci -m "mod parent only"
520 616 $ hg init sub3
521 617 $ echo "sub3 = sub3" >> .hgsub
522 618 $ echo xyz > sub3/a.txt
523 619 $ hg add sub3/a.txt
524 620 $ hg ci -Sm "add sub3"
525 621 committing subrepository sub3
526 622 $ cat .hgsub | grep -v sub3 > .hgsub1
527 623 $ mv .hgsub1 .hgsub
528 624 $ hg ci -m "remove sub3"
529 625
530 626 $ hg log -r "subrepo()" --style compact
531 627 0 7f491f53a367 1970-01-01 00:00 +0000 test
532 628 main import
533 629
534 630 1 ffe6649062fe 1970-01-01 00:00 +0000 test
535 631 deep nested modif should trigger a commit
536 632
537 633 2 9bb10eebee29 1970-01-01 00:00 +0000 test
538 634 add test.txt
539 635
540 636 3 7c64f035294f 1970-01-01 00:00 +0000 test
541 637 add large files
542 638
543 639 4 f734a59e2e35 1970-01-01 00:00 +0000 test
544 640 forget testing
545 641
546 642 11 9685a22af5db 1970-01-01 00:00 +0000 test
547 643 add sub3
548 644
549 645 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
550 646 remove sub3
551 647
552 648 $ hg log -r "subrepo('sub3')" --style compact
553 649 11 9685a22af5db 1970-01-01 00:00 +0000 test
554 650 add sub3
555 651
556 652 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
557 653 remove sub3
558 654
559 655 $ hg log -r "subrepo('bogus')" --style compact
560 656
561 657
562 658 Test .hgsubstate in the R state
563 659
564 660 $ hg rm .hgsub .hgsubstate
565 661 $ hg ci -m 'trash subrepo tracking'
566 662
567 663 $ hg log -r "subrepo('re:sub\d+')" --style compact
568 664 0 7f491f53a367 1970-01-01 00:00 +0000 test
569 665 main import
570 666
571 667 1 ffe6649062fe 1970-01-01 00:00 +0000 test
572 668 deep nested modif should trigger a commit
573 669
574 670 2 9bb10eebee29 1970-01-01 00:00 +0000 test
575 671 add test.txt
576 672
577 673 3 7c64f035294f 1970-01-01 00:00 +0000 test
578 674 add large files
579 675
580 676 4 f734a59e2e35 1970-01-01 00:00 +0000 test
581 677 forget testing
582 678
583 679 11 9685a22af5db 1970-01-01 00:00 +0000 test
584 680 add sub3
585 681
586 682 12 2e0485b475b9 1970-01-01 00:00 +0000 test
587 683 remove sub3
588 684
589 685 13[tip] a68b2c361653 1970-01-01 00:00 +0000 test
590 686 trash subrepo tracking
591 687
592 688
593 689 Restore the trashed subrepo tracking
594 690
595 691 $ hg rollback -q
596 692 $ hg update -Cq .
597 693
598 694 $ cd ..