subrepo: run the repo decoders when archiving...
Matt Harbison
r31099:b44ab288 default
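
The hunks below thread a new `decode` parameter through largefiles' subrepo archiving: `hgsubrepoarchive()` now accepts `decode=True`, runs file data through `repo._repo.wwritedata()` when the flag is set (so the repository's decode filters, for example EOL conversion, are applied just as `overridearchive()` already does for the top-level repo), and forwards the flag into nested `sub.archive()` calls. Below is a minimal, self-contained sketch of that threading pattern; the `Repo` and `Archiver` classes are illustrative stand-ins, not Mercurial's API.

# Hypothetical, simplified model of the pattern in this changeset: thread a
# 'decode' flag through nested archive calls so content filters run on every
# archived file, including files coming from subrepos. Repo and Archiver are
# illustrative stand-ins, not Mercurial's real classes.

class Archiver(object):
    def __init__(self):
        self.files = {}

    def addfile(self, name, data):
        self.files[name] = data


class Repo(object):
    def __init__(self, path, files, subrepos=()):
        self.path = path            # '' for the top-level repo
        self.files = files          # {name: raw file data}
        self.subrepos = subrepos    # nested Repo instances

    def wwritedata(self, name, data):
        # stand-in for the repo decode filters (for example EOL conversion)
        return data.replace('\n', '\r\n') if name.endswith('.txt') else data

    def archive(self, archiver, prefix='', decode=True):
        for name, data in sorted(self.files.items()):
            if decode:
                data = self.wwritedata(name, data)  # run the decoders
            archiver.addfile(prefix + name, data)
        for sub in self.subrepos:
            # the fix: forward 'decode' when recursing into subrepos
            sub.archive(archiver, prefix + sub.path + '/', decode)


inner = Repo('sub', {'notes.txt': 'a\nb\n'})
outer = Repo('', {'readme.txt': 'x\n'}, subrepos=(inner,))
arc = Archiver()
outer.archive(arc, decode=True)
print(arc.files)  # {'readme.txt': 'x\r\n', 'sub/notes.txt': 'a\r\nb\r\n'}

In the real hunks this corresponds to the `decode=True` parameter added to `hgsubrepoarchive`, the `if decode: data = repo._repo.wwritedata(name, data)` guard in its `write()` helper, and the extra `decode` argument passed to `sub.archive(...)`. The path is exercised by commands like `hg archive -S`, where decoding is applied by default and can be skipped with `--no-decode`.
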
hgext/largefiles/overrides.py
@@ -1,1439 +1,1441 @@
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10 from __future__ import absolute_import
11 11
12 12 import copy
13 13 import os
14 14
15 15 from mercurial.i18n import _
16 16
17 17 from mercurial import (
18 18 archival,
19 19 cmdutil,
20 20 error,
21 21 hg,
22 22 match as matchmod,
23 23 pathutil,
24 24 registrar,
25 25 scmutil,
26 26 smartset,
27 27 util,
28 28 )
29 29
30 30 from . import (
31 31 lfcommands,
32 32 lfutil,
33 33 storefactory,
34 34 )
35 35
36 36 # -- Utility functions: commonly/repeatedly needed functionality ---------------
37 37
38 38 def composelargefilematcher(match, manifest):
39 39 '''create a matcher that matches only the largefiles in the original
40 40 matcher'''
41 41 m = copy.copy(match)
42 42 lfile = lambda f: lfutil.standin(f) in manifest
43 43 m._files = filter(lfile, m._files)
44 44 m._fileroots = set(m._files)
45 45 m._always = False
46 46 origmatchfn = m.matchfn
47 47 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
48 48 return m
49 49
50 50 def composenormalfilematcher(match, manifest, exclude=None):
51 51 excluded = set()
52 52 if exclude is not None:
53 53 excluded.update(exclude)
54 54
55 55 m = copy.copy(match)
56 56 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
57 57 manifest or f in excluded)
58 58 m._files = filter(notlfile, m._files)
59 59 m._fileroots = set(m._files)
60 60 m._always = False
61 61 origmatchfn = m.matchfn
62 62 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
63 63 return m
64 64
65 65 def installnormalfilesmatchfn(manifest):
66 66 '''installmatchfn with a matchfn that ignores all largefiles'''
67 67 def overridematch(ctx, pats=(), opts=None, globbed=False,
68 68 default='relpath', badfn=None):
69 69 if opts is None:
70 70 opts = {}
71 71 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
72 72 return composenormalfilematcher(match, manifest)
73 73 oldmatch = installmatchfn(overridematch)
74 74
75 75 def installmatchfn(f):
76 76 '''monkey patch the scmutil module with a custom match function.
77 77 Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
78 78 oldmatch = scmutil.match
79 79 setattr(f, 'oldmatch', oldmatch)
80 80 scmutil.match = f
81 81 return oldmatch
82 82
83 83 def restorematchfn():
84 84 '''restores scmutil.match to what it was before installmatchfn
85 85 was called. no-op if scmutil.match is its original function.
86 86
87 87 Note that n calls to installmatchfn will require n calls to
88 88 restore the original matchfn.'''
89 89 scmutil.match = getattr(scmutil.match, 'oldmatch')
90 90
91 91 def installmatchandpatsfn(f):
92 92 oldmatchandpats = scmutil.matchandpats
93 93 setattr(f, 'oldmatchandpats', oldmatchandpats)
94 94 scmutil.matchandpats = f
95 95 return oldmatchandpats
96 96
97 97 def restorematchandpatsfn():
98 98 '''restores scmutil.matchandpats to what it was before
99 99 installmatchandpatsfn was called. No-op if scmutil.matchandpats
100 100 is its original function.
101 101
102 102 Note that n calls to installmatchandpatsfn will require n calls
103 103 to restore the original matchfn.'''
104 104 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
105 105 scmutil.matchandpats)
106 106
107 107 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
108 108 large = opts.get('large')
109 109 lfsize = lfutil.getminsize(
110 110 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
111 111
112 112 lfmatcher = None
113 113 if lfutil.islfilesrepo(repo):
114 114 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
115 115 if lfpats:
116 116 lfmatcher = matchmod.match(repo.root, '', list(lfpats))
117 117
118 118 lfnames = []
119 119 m = matcher
120 120
121 121 wctx = repo[None]
122 122 for f in repo.walk(matchmod.badmatch(m, lambda x, y: None)):
123 123 exact = m.exact(f)
124 124 lfile = lfutil.standin(f) in wctx
125 125 nfile = f in wctx
126 126 exists = lfile or nfile
127 127
128 128 # addremove in core gets fancy with the name, add doesn't
129 129 if isaddremove:
130 130 name = m.uipath(f)
131 131 else:
132 132 name = m.rel(f)
133 133
134 134 # Don't warn the user when they attempt to add a normal tracked file.
135 135 # The normal add code will do that for us.
136 136 if exact and exists:
137 137 if lfile:
138 138 ui.warn(_('%s already a largefile\n') % name)
139 139 continue
140 140
141 141 if (exact or not exists) and not lfutil.isstandin(f):
142 142 # In case the file was removed previously, but not committed
143 143 # (issue3507)
144 144 if not repo.wvfs.exists(f):
145 145 continue
146 146
147 147 abovemin = (lfsize and
148 148 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
149 149 if large or abovemin or (lfmatcher and lfmatcher(f)):
150 150 lfnames.append(f)
151 151 if ui.verbose or not exact:
152 152 ui.status(_('adding %s as a largefile\n') % name)
153 153
154 154 bad = []
155 155
156 156 # Need to lock, otherwise there could be a race condition between
157 157 # when standins are created and added to the repo.
158 158 with repo.wlock():
159 159 if not opts.get('dry_run'):
160 160 standins = []
161 161 lfdirstate = lfutil.openlfdirstate(ui, repo)
162 162 for f in lfnames:
163 163 standinname = lfutil.standin(f)
164 164 lfutil.writestandin(repo, standinname, hash='',
165 165 executable=lfutil.getexecutable(repo.wjoin(f)))
166 166 standins.append(standinname)
167 167 if lfdirstate[f] == 'r':
168 168 lfdirstate.normallookup(f)
169 169 else:
170 170 lfdirstate.add(f)
171 171 lfdirstate.write()
172 172 bad += [lfutil.splitstandin(f)
173 173 for f in repo[None].add(standins)
174 174 if f in m.files()]
175 175
176 176 added = [f for f in lfnames if f not in bad]
177 177 return added, bad
178 178
179 179 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
180 180 after = opts.get('after')
181 181 m = composelargefilematcher(matcher, repo[None].manifest())
182 182 try:
183 183 repo.lfstatus = True
184 184 s = repo.status(match=m, clean=not isaddremove)
185 185 finally:
186 186 repo.lfstatus = False
187 187 manifest = repo[None].manifest()
188 188 modified, added, deleted, clean = [[f for f in list
189 189 if lfutil.standin(f) in manifest]
190 190 for list in (s.modified, s.added,
191 191 s.deleted, s.clean)]
192 192
193 193 def warn(files, msg):
194 194 for f in files:
195 195 ui.warn(msg % m.rel(f))
196 196 return int(len(files) > 0)
197 197
198 198 result = 0
199 199
200 200 if after:
201 201 remove = deleted
202 202 result = warn(modified + added + clean,
203 203 _('not removing %s: file still exists\n'))
204 204 else:
205 205 remove = deleted + clean
206 206 result = warn(modified, _('not removing %s: file is modified (use -f'
207 207 ' to force removal)\n'))
208 208 result = warn(added, _('not removing %s: file has been marked for add'
209 209 ' (use forget to undo)\n')) or result
210 210
211 211 # Need to lock because standin files are deleted then removed from the
212 212 # repository and we could race in-between.
213 213 with repo.wlock():
214 214 lfdirstate = lfutil.openlfdirstate(ui, repo)
215 215 for f in sorted(remove):
216 216 if ui.verbose or not m.exact(f):
217 217 # addremove in core gets fancy with the name, remove doesn't
218 218 if isaddremove:
219 219 name = m.uipath(f)
220 220 else:
221 221 name = m.rel(f)
222 222 ui.status(_('removing %s\n') % name)
223 223
224 224 if not opts.get('dry_run'):
225 225 if not after:
226 226 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
227 227
228 228 if opts.get('dry_run'):
229 229 return result
230 230
231 231 remove = [lfutil.standin(f) for f in remove]
232 232 # If this is being called by addremove, let the original addremove
233 233 # function handle this.
234 234 if not isaddremove:
235 235 for f in remove:
236 236 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
237 237 repo[None].forget(remove)
238 238
239 239 for f in remove:
240 240 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
241 241 False)
242 242
243 243 lfdirstate.write()
244 244
245 245 return result
246 246
247 247 # For overriding mercurial.hgweb.webcommands so that largefiles will
248 248 # appear at their right place in the manifests.
249 249 def decodepath(orig, path):
250 250 return lfutil.splitstandin(path) or path
251 251
252 252 # -- Wrappers: modify existing commands --------------------------------
253 253
254 254 def overrideadd(orig, ui, repo, *pats, **opts):
255 255 if opts.get('normal') and opts.get('large'):
256 256 raise error.Abort(_('--normal cannot be used with --large'))
257 257 return orig(ui, repo, *pats, **opts)
258 258
259 259 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
260 260 # The --normal flag short circuits this override
261 261 if opts.get('normal'):
262 262 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
263 263
264 264 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
265 265 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
266 266 ladded)
267 267 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
268 268
269 269 bad.extend(f for f in lbad)
270 270 return bad
271 271
272 272 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
273 273 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
274 274 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
275 275 return removelargefiles(ui, repo, False, matcher, after=after,
276 276 force=force) or result
277 277
278 278 def overridestatusfn(orig, repo, rev2, **opts):
279 279 try:
280 280 repo._repo.lfstatus = True
281 281 return orig(repo, rev2, **opts)
282 282 finally:
283 283 repo._repo.lfstatus = False
284 284
285 285 def overridestatus(orig, ui, repo, *pats, **opts):
286 286 try:
287 287 repo.lfstatus = True
288 288 return orig(ui, repo, *pats, **opts)
289 289 finally:
290 290 repo.lfstatus = False
291 291
292 292 def overridedirty(orig, repo, ignoreupdate=False):
293 293 try:
294 294 repo._repo.lfstatus = True
295 295 return orig(repo, ignoreupdate)
296 296 finally:
297 297 repo._repo.lfstatus = False
298 298
299 299 def overridelog(orig, ui, repo, *pats, **opts):
300 300 def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
301 301 default='relpath', badfn=None):
302 302 """Matcher that merges root directory with .hglf, suitable for log.
303 303 It is still possible to match .hglf directly.
304 304 For any listed files run log on the standin too.
305 305 matchfn tries both the given filename and with .hglf stripped.
306 306 """
307 307 if opts is None:
308 308 opts = {}
309 309 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
310 310 badfn=badfn)
311 311 m, p = copy.copy(matchandpats)
312 312
313 313 if m.always():
314 314 # We want to match everything anyway, so there's no benefit trying
315 315 # to add standins.
316 316 return matchandpats
317 317
318 318 pats = set(p)
319 319
320 320 def fixpats(pat, tostandin=lfutil.standin):
321 321 if pat.startswith('set:'):
322 322 return pat
323 323
324 324 kindpat = matchmod._patsplit(pat, None)
325 325
326 326 if kindpat[0] is not None:
327 327 return kindpat[0] + ':' + tostandin(kindpat[1])
328 328 return tostandin(kindpat[1])
329 329
330 330 if m._cwd:
331 331 hglf = lfutil.shortname
332 332 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
333 333
334 334 def tostandin(f):
335 335 # The file may already be a standin, so truncate the back
336 336 # prefix and test before mangling it. This avoids turning
337 337 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
338 338 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
339 339 return f
340 340
341 341 # An absolute path is from outside the repo, so truncate the
342 342 # path to the root before building the standin. Otherwise cwd
343 343 # is somewhere in the repo, relative to root, and needs to be
344 344 # prepended before building the standin.
345 345 if os.path.isabs(m._cwd):
346 346 f = f[len(back):]
347 347 else:
348 348 f = m._cwd + '/' + f
349 349 return back + lfutil.standin(f)
350 350
351 351 pats.update(fixpats(f, tostandin) for f in p)
352 352 else:
353 353 def tostandin(f):
354 354 if lfutil.splitstandin(f):
355 355 return f
356 356 return lfutil.standin(f)
357 357 pats.update(fixpats(f, tostandin) for f in p)
358 358
359 359 for i in range(0, len(m._files)):
360 360 # Don't add '.hglf' to m.files, since that is already covered by '.'
361 361 if m._files[i] == '.':
362 362 continue
363 363 standin = lfutil.standin(m._files[i])
364 364 # If the "standin" is a directory, append instead of replace to
365 365 # support naming a directory on the command line with only
366 366 # largefiles. The original directory is kept to support normal
367 367 # files.
368 368 if standin in repo[ctx.node()]:
369 369 m._files[i] = standin
370 370 elif m._files[i] not in repo[ctx.node()] \
371 371 and repo.wvfs.isdir(standin):
372 372 m._files.append(standin)
373 373
374 374 m._fileroots = set(m._files)
375 375 m._always = False
376 376 origmatchfn = m.matchfn
377 377 def lfmatchfn(f):
378 378 lf = lfutil.splitstandin(f)
379 379 if lf is not None and origmatchfn(lf):
380 380 return True
381 381 r = origmatchfn(f)
382 382 return r
383 383 m.matchfn = lfmatchfn
384 384
385 385 ui.debug('updated patterns: %s\n' % sorted(pats))
386 386 return m, pats
387 387
388 388 # For hg log --patch, the match object is used in two different senses:
389 389 # (1) to determine what revisions should be printed out, and
390 390 # (2) to determine what files to print out diffs for.
391 391 # The magic matchandpats override should be used for case (1) but not for
392 392 # case (2).
393 393 def overridemakelogfilematcher(repo, pats, opts, badfn=None):
394 394 wctx = repo[None]
395 395 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
396 396 return lambda rev: match
397 397
398 398 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
399 399 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
400 400 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
401 401
402 402 try:
403 403 return orig(ui, repo, *pats, **opts)
404 404 finally:
405 405 restorematchandpatsfn()
406 406 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
407 407
408 408 def overrideverify(orig, ui, repo, *pats, **opts):
409 409 large = opts.pop('large', False)
410 410 all = opts.pop('lfa', False)
411 411 contents = opts.pop('lfc', False)
412 412
413 413 result = orig(ui, repo, *pats, **opts)
414 414 if large or all or contents:
415 415 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
416 416 return result
417 417
418 418 def overridedebugstate(orig, ui, repo, *pats, **opts):
419 419 large = opts.pop('large', False)
420 420 if large:
421 421 class fakerepo(object):
422 422 dirstate = lfutil.openlfdirstate(ui, repo)
423 423 orig(ui, fakerepo, *pats, **opts)
424 424 else:
425 425 orig(ui, repo, *pats, **opts)
426 426
427 427 # Before starting the manifest merge, merge.updates will call
428 428 # _checkunknownfile to check if there are any files in the merged-in
429 429 # changeset that collide with unknown files in the working copy.
430 430 #
431 431 # The largefiles are seen as unknown, so this prevents us from merging
432 432 # in a file 'foo' if we already have a largefile with the same name.
433 433 #
434 434 # The overridden function filters the unknown files by removing any
435 435 # largefiles. This makes the merge proceed and we can then handle this
436 436 # case further in the overridden calculateupdates function below.
437 437 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
438 438 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
439 439 return False
440 440 return origfn(repo, wctx, mctx, f, f2)
441 441
442 442 # The manifest merge handles conflicts on the manifest level. We want
443 443 # to handle changes in largefile-ness of files at this level too.
444 444 #
445 445 # The strategy is to run the original calculateupdates and then process
446 446 # the action list it outputs. There are two cases we need to deal with:
447 447 #
448 448 # 1. Normal file in p1, largefile in p2. Here the largefile is
449 449 # detected via its standin file, which will enter the working copy
450 450 # with a "get" action. It is not "merge" since the standin is all
451 451 # Mercurial is concerned with at this level -- the link to the
452 452 # existing normal file is not relevant here.
453 453 #
454 454 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
455 455 # since the largefile will be present in the working copy and
456 456 # different from the normal file in p2. Mercurial therefore
457 457 # triggers a merge action.
458 458 #
459 459 # In both cases, we prompt the user and emit new actions to either
460 460 # remove the standin (if the normal file was kept) or to remove the
461 461 # normal file and get the standin (if the largefile was kept). The
462 462 # default prompt answer is to use the largefile version since it was
463 463 # presumably changed on purpose.
464 464 #
465 465 # Finally, the merge.applyupdates function will then take care of
466 466 # writing the files into the working copy and lfcommands.updatelfiles
467 467 # will update the largefiles.
468 468 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
469 469 acceptremote, *args, **kwargs):
470 470 overwrite = force and not branchmerge
471 471 actions, diverge, renamedelete = origfn(
472 472 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs)
473 473
474 474 if overwrite:
475 475 return actions, diverge, renamedelete
476 476
477 477 # Convert to dictionary with filename as key and action as value.
478 478 lfiles = set()
479 479 for f in actions:
480 480 splitstandin = lfutil.splitstandin(f)
481 481 if splitstandin in p1:
482 482 lfiles.add(splitstandin)
483 483 elif lfutil.standin(f) in p1:
484 484 lfiles.add(f)
485 485
486 486 for lfile in sorted(lfiles):
487 487 standin = lfutil.standin(lfile)
488 488 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
489 489 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
490 490 if sm in ('g', 'dc') and lm != 'r':
491 491 if sm == 'dc':
492 492 f1, f2, fa, move, anc = sargs
493 493 sargs = (p2[f2].flags(), False)
494 494 # Case 1: normal file in the working copy, largefile in
495 495 # the second parent
496 496 usermsg = _('remote turned local normal file %s into a largefile\n'
497 497 'use (l)argefile or keep (n)ormal file?'
498 498 '$$ &Largefile $$ &Normal file') % lfile
499 499 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
500 500 actions[lfile] = ('r', None, 'replaced by standin')
501 501 actions[standin] = ('g', sargs, 'replaces standin')
502 502 else: # keep local normal file
503 503 actions[lfile] = ('k', None, 'replaces standin')
504 504 if branchmerge:
505 505 actions[standin] = ('k', None, 'replaced by non-standin')
506 506 else:
507 507 actions[standin] = ('r', None, 'replaced by non-standin')
508 508 elif lm in ('g', 'dc') and sm != 'r':
509 509 if lm == 'dc':
510 510 f1, f2, fa, move, anc = largs
511 511 largs = (p2[f2].flags(), False)
512 512 # Case 2: largefile in the working copy, normal file in
513 513 # the second parent
514 514 usermsg = _('remote turned local largefile %s into a normal file\n'
515 515 'keep (l)argefile or use (n)ormal file?'
516 516 '$$ &Largefile $$ &Normal file') % lfile
517 517 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
518 518 if branchmerge:
519 519 # largefile can be restored from standin safely
520 520 actions[lfile] = ('k', None, 'replaced by standin')
521 521 actions[standin] = ('k', None, 'replaces standin')
522 522 else:
523 523 # "lfile" should be marked as "removed" without
524 524 # removal of itself
525 525 actions[lfile] = ('lfmr', None,
526 526 'forget non-standin largefile')
527 527
528 528 # linear-merge should treat this largefile as 're-added'
529 529 actions[standin] = ('a', None, 'keep standin')
530 530 else: # pick remote normal file
531 531 actions[lfile] = ('g', largs, 'replaces standin')
532 532 actions[standin] = ('r', None, 'replaced by non-standin')
533 533
534 534 return actions, diverge, renamedelete
535 535
536 536 def mergerecordupdates(orig, repo, actions, branchmerge):
537 537 if 'lfmr' in actions:
538 538 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
539 539 for lfile, args, msg in actions['lfmr']:
540 540 # this should be executed before 'orig', to execute 'remove'
541 541 # before all other actions
542 542 repo.dirstate.remove(lfile)
543 543 # make sure lfile doesn't get synclfdirstate'd as normal
544 544 lfdirstate.add(lfile)
545 545 lfdirstate.write()
546 546
547 547 return orig(repo, actions, branchmerge)
548 548
549 549 # Override filemerge to prompt the user about how they wish to merge
550 550 # largefiles. This will handle identical edits without prompting the user.
551 551 def overridefilemerge(origfn, premerge, repo, mynode, orig, fcd, fco, fca,
552 552 labels=None):
553 553 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
554 554 return origfn(premerge, repo, mynode, orig, fcd, fco, fca,
555 555 labels=labels)
556 556
557 557 ahash = fca.data().strip().lower()
558 558 dhash = fcd.data().strip().lower()
559 559 ohash = fco.data().strip().lower()
560 560 if (ohash != ahash and
561 561 ohash != dhash and
562 562 (dhash == ahash or
563 563 repo.ui.promptchoice(
564 564 _('largefile %s has a merge conflict\nancestor was %s\n'
565 565 'keep (l)ocal %s or\ntake (o)ther %s?'
566 566 '$$ &Local $$ &Other') %
567 567 (lfutil.splitstandin(orig), ahash, dhash, ohash),
568 568 0) == 1)):
569 569 repo.wwrite(fcd.path(), fco.data(), fco.flags())
570 570 return True, 0, False
571 571
572 572 def copiespathcopies(orig, ctx1, ctx2, match=None):
573 573 copies = orig(ctx1, ctx2, match=match)
574 574 updated = {}
575 575
576 576 for k, v in copies.iteritems():
577 577 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
578 578
579 579 return updated
580 580
581 581 # Copy first changes the matchers to match standins instead of
582 582 # largefiles. Then it overrides util.copyfile in that function it
583 583 # checks if the destination largefile already exists. It also keeps a
584 584 # list of copied files so that the largefiles can be copied and the
585 585 # dirstate updated.
586 586 def overridecopy(orig, ui, repo, pats, opts, rename=False):
587 587 # doesn't remove largefile on rename
588 588 if len(pats) < 2:
589 589 # this isn't legal, let the original function deal with it
590 590 return orig(ui, repo, pats, opts, rename)
591 591
592 592 # This could copy both lfiles and normal files in one command,
593 593 # but we don't want to do that. First replace their matcher to
594 594 # only match normal files and run it, then replace it to just
595 595 # match largefiles and run it again.
596 596 nonormalfiles = False
597 597 nolfiles = False
598 598 installnormalfilesmatchfn(repo[None].manifest())
599 599 try:
600 600 result = orig(ui, repo, pats, opts, rename)
601 601 except error.Abort as e:
602 602 if str(e) != _('no files to copy'):
603 603 raise e
604 604 else:
605 605 nonormalfiles = True
606 606 result = 0
607 607 finally:
608 608 restorematchfn()
609 609
610 610 # The first rename can cause our current working directory to be removed.
611 611 # In that case there is nothing left to copy/rename so just quit.
612 612 try:
613 613 repo.getcwd()
614 614 except OSError:
615 615 return result
616 616
617 617 def makestandin(relpath):
618 618 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
619 619 return repo.wvfs.join(lfutil.standin(path))
620 620
621 621 fullpats = scmutil.expandpats(pats)
622 622 dest = fullpats[-1]
623 623
624 624 if os.path.isdir(dest):
625 625 if not os.path.isdir(makestandin(dest)):
626 626 os.makedirs(makestandin(dest))
627 627
628 628 try:
629 629 # When we call orig below it creates the standins but we don't add
630 630 # them to the dir state until later so lock during that time.
631 631 wlock = repo.wlock()
632 632
633 633 manifest = repo[None].manifest()
634 634 def overridematch(ctx, pats=(), opts=None, globbed=False,
635 635 default='relpath', badfn=None):
636 636 if opts is None:
637 637 opts = {}
638 638 newpats = []
639 639 # The patterns were previously mangled to add the standin
640 640 # directory; we need to remove that now
641 641 for pat in pats:
642 642 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
643 643 newpats.append(pat.replace(lfutil.shortname, ''))
644 644 else:
645 645 newpats.append(pat)
646 646 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
647 647 m = copy.copy(match)
648 648 lfile = lambda f: lfutil.standin(f) in manifest
649 649 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
650 650 m._fileroots = set(m._files)
651 651 origmatchfn = m.matchfn
652 652 m.matchfn = lambda f: (lfutil.isstandin(f) and
653 653 (f in manifest) and
654 654 origmatchfn(lfutil.splitstandin(f)) or
655 655 None)
656 656 return m
657 657 oldmatch = installmatchfn(overridematch)
658 658 listpats = []
659 659 for pat in pats:
660 660 if matchmod.patkind(pat) is not None:
661 661 listpats.append(pat)
662 662 else:
663 663 listpats.append(makestandin(pat))
664 664
665 665 try:
666 666 origcopyfile = util.copyfile
667 667 copiedfiles = []
668 668 def overridecopyfile(src, dest):
669 669 if (lfutil.shortname in src and
670 670 dest.startswith(repo.wjoin(lfutil.shortname))):
671 671 destlfile = dest.replace(lfutil.shortname, '')
672 672 if not opts['force'] and os.path.exists(destlfile):
673 673 raise IOError('',
674 674 _('destination largefile already exists'))
675 675 copiedfiles.append((src, dest))
676 676 origcopyfile(src, dest)
677 677
678 678 util.copyfile = overridecopyfile
679 679 result += orig(ui, repo, listpats, opts, rename)
680 680 finally:
681 681 util.copyfile = origcopyfile
682 682
683 683 lfdirstate = lfutil.openlfdirstate(ui, repo)
684 684 for (src, dest) in copiedfiles:
685 685 if (lfutil.shortname in src and
686 686 dest.startswith(repo.wjoin(lfutil.shortname))):
687 687 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
688 688 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
689 689 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
690 690 if not os.path.isdir(destlfiledir):
691 691 os.makedirs(destlfiledir)
692 692 if rename:
693 693 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
694 694
695 695 # The file is gone, but this deletes any empty parent
696 696 # directories as a side-effect.
697 697 util.unlinkpath(repo.wjoin(srclfile), True)
698 698 lfdirstate.remove(srclfile)
699 699 else:
700 700 util.copyfile(repo.wjoin(srclfile),
701 701 repo.wjoin(destlfile))
702 702
703 703 lfdirstate.add(destlfile)
704 704 lfdirstate.write()
705 705 except error.Abort as e:
706 706 if str(e) != _('no files to copy'):
707 707 raise e
708 708 else:
709 709 nolfiles = True
710 710 finally:
711 711 restorematchfn()
712 712 wlock.release()
713 713
714 714 if nolfiles and nonormalfiles:
715 715 raise error.Abort(_('no files to copy'))
716 716
717 717 return result
718 718
719 719 # When the user calls revert, we have to be careful to not revert any
720 720 # changes to other largefiles accidentally. This means we have to keep
721 721 # track of the largefiles that are being reverted so we only pull down
722 722 # the necessary largefiles.
723 723 #
724 724 # Standins are only updated (to match the hash of largefiles) before
725 725 # commits. Update the standins then run the original revert, changing
726 726 # the matcher to hit standins instead of largefiles. Based on the
727 727 # resulting standins update the largefiles.
728 728 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
729 729 # Because we put the standins in a bad state (by updating them)
730 730 # and then return them to a correct state we need to lock to
731 731 # prevent others from changing them in their incorrect state.
732 732 with repo.wlock():
733 733 lfdirstate = lfutil.openlfdirstate(ui, repo)
734 734 s = lfutil.lfdirstatestatus(lfdirstate, repo)
735 735 lfdirstate.write()
736 736 for lfile in s.modified:
737 737 lfutil.updatestandin(repo, lfutil.standin(lfile))
738 738 for lfile in s.deleted:
739 739 if (repo.wvfs.exists(lfutil.standin(lfile))):
740 740 repo.wvfs.unlink(lfutil.standin(lfile))
741 741
742 742 oldstandins = lfutil.getstandinsstate(repo)
743 743
744 744 def overridematch(mctx, pats=(), opts=None, globbed=False,
745 745 default='relpath', badfn=None):
746 746 if opts is None:
747 747 opts = {}
748 748 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
749 749 m = copy.copy(match)
750 750
751 751 # revert supports recursing into subrepos, and though largefiles
752 752 # currently doesn't work correctly in that case, this match is
753 753 # called, so the lfdirstate above may not be the correct one for
754 754 # this invocation of match.
755 755 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
756 756 False)
757 757
758 758 def tostandin(f):
759 759 standin = lfutil.standin(f)
760 760 if standin in ctx or standin in mctx:
761 761 return standin
762 762 elif standin in repo[None] or lfdirstate[f] == 'r':
763 763 return None
764 764 return f
765 765 m._files = [tostandin(f) for f in m._files]
766 766 m._files = [f for f in m._files if f is not None]
767 767 m._fileroots = set(m._files)
768 768 origmatchfn = m.matchfn
769 769 def matchfn(f):
770 770 if lfutil.isstandin(f):
771 771 return (origmatchfn(lfutil.splitstandin(f)) and
772 772 (f in ctx or f in mctx))
773 773 return origmatchfn(f)
774 774 m.matchfn = matchfn
775 775 return m
776 776 oldmatch = installmatchfn(overridematch)
777 777 try:
778 778 orig(ui, repo, ctx, parents, *pats, **opts)
779 779 finally:
780 780 restorematchfn()
781 781
782 782 newstandins = lfutil.getstandinsstate(repo)
783 783 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
784 784 # lfdirstate should be 'normallookup'-ed for updated files,
785 785 # because reverting doesn't touch dirstate for 'normal' files
786 786 # when target revision is explicitly specified: in such case,
787 787 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
788 788 # of target (standin) file.
789 789 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
790 790 normallookup=True)
791 791
792 792 # after pulling changesets, we need to take some extra care to get
793 793 # largefiles updated remotely
794 794 def overridepull(orig, ui, repo, source=None, **opts):
795 795 revsprepull = len(repo)
796 796 if not source:
797 797 source = 'default'
798 798 repo.lfpullsource = source
799 799 result = orig(ui, repo, source, **opts)
800 800 revspostpull = len(repo)
801 801 lfrevs = opts.get('lfrev', [])
802 802 if opts.get('all_largefiles'):
803 803 lfrevs.append('pulled()')
804 804 if lfrevs and revspostpull > revsprepull:
805 805 numcached = 0
806 806 repo.firstpulled = revsprepull # for pulled() revset expression
807 807 try:
808 808 for rev in scmutil.revrange(repo, lfrevs):
809 809 ui.note(_('pulling largefiles for revision %s\n') % rev)
810 810 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
811 811 numcached += len(cached)
812 812 finally:
813 813 del repo.firstpulled
814 814 ui.status(_("%d largefiles cached\n") % numcached)
815 815 return result
816 816
817 817 def overridepush(orig, ui, repo, *args, **kwargs):
818 818 """Override push command and store --lfrev parameters in opargs"""
819 819 lfrevs = kwargs.pop('lfrev', None)
820 820 if lfrevs:
821 821 opargs = kwargs.setdefault('opargs', {})
822 822 opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
823 823 return orig(ui, repo, *args, **kwargs)
824 824
825 825 def exchangepushoperation(orig, *args, **kwargs):
826 826 """Override pushoperation constructor and store lfrevs parameter"""
827 827 lfrevs = kwargs.pop('lfrevs', None)
828 828 pushop = orig(*args, **kwargs)
829 829 pushop.lfrevs = lfrevs
830 830 return pushop
831 831
832 832 revsetpredicate = registrar.revsetpredicate()
833 833
834 834 @revsetpredicate('pulled()')
835 835 def pulledrevsetsymbol(repo, subset, x):
836 836 """Changesets that just has been pulled.
837 837
838 838 Only available with largefiles from pull --lfrev expressions.
839 839
840 840 .. container:: verbose
841 841
842 842 Some examples:
843 843
844 844 - pull largefiles for all new changesets::
845 845
846 846 hg pull -lfrev "pulled()"
847 847
848 848 - pull largefiles for all new branch heads::
849 849
850 850 hg pull -lfrev "head(pulled()) and not closed()"
851 851
852 852 """
853 853
854 854 try:
855 855 firstpulled = repo.firstpulled
856 856 except AttributeError:
857 857 raise error.Abort(_("pulled() only available in --lfrev"))
858 858 return smartset.baseset([r for r in subset if r >= firstpulled])
859 859
860 860 def overrideclone(orig, ui, source, dest=None, **opts):
861 861 d = dest
862 862 if d is None:
863 863 d = hg.defaultdest(source)
864 864 if opts.get('all_largefiles') and not hg.islocal(d):
865 865 raise error.Abort(_(
866 866 '--all-largefiles is incompatible with non-local destination %s') %
867 867 d)
868 868
869 869 return orig(ui, source, dest, **opts)
870 870
871 871 def hgclone(orig, ui, opts, *args, **kwargs):
872 872 result = orig(ui, opts, *args, **kwargs)
873 873
874 874 if result is not None:
875 875 sourcerepo, destrepo = result
876 876 repo = destrepo.local()
877 877
878 878 # When cloning to a remote repo (like through SSH), no repo is available
879 879 # from the peer. Therefore the largefiles can't be downloaded and the
880 880 # hgrc can't be updated.
881 881 if not repo:
882 882 return result
883 883
884 884 # If largefiles is required for this repo, permanently enable it locally
885 885 if 'largefiles' in repo.requirements:
886 886 with repo.vfs('hgrc', 'a', text=True) as fp:
887 887 fp.write('\n[extensions]\nlargefiles=\n')
888 888
889 889 # Caching is implicitly limited to 'rev' option, since the dest repo was
890 890 # truncated at that point. The user may expect a download count with
891 891 # this option, so attempt whether or not this is a largefile repo.
892 892 if opts.get('all_largefiles'):
893 893 success, missing = lfcommands.downloadlfiles(ui, repo, None)
894 894
895 895 if missing != 0:
896 896 return None
897 897
898 898 return result
899 899
900 900 def overriderebase(orig, ui, repo, **opts):
901 901 if not util.safehasattr(repo, '_largefilesenabled'):
902 902 return orig(ui, repo, **opts)
903 903
904 904 resuming = opts.get('continue')
905 905 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
906 906 repo._lfstatuswriters.append(lambda *msg, **opts: None)
907 907 try:
908 908 return orig(ui, repo, **opts)
909 909 finally:
910 910 repo._lfstatuswriters.pop()
911 911 repo._lfcommithooks.pop()
912 912
913 913 def overridearchivecmd(orig, ui, repo, dest, **opts):
914 914 repo.unfiltered().lfstatus = True
915 915
916 916 try:
917 917 return orig(ui, repo.unfiltered(), dest, **opts)
918 918 finally:
919 919 repo.unfiltered().lfstatus = False
920 920
921 921 def hgwebarchive(orig, web, req, tmpl):
922 922 web.repo.lfstatus = True
923 923
924 924 try:
925 925 return orig(web, req, tmpl)
926 926 finally:
927 927 web.repo.lfstatus = False
928 928
929 929 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
930 930 prefix='', mtime=None, subrepos=None):
931 931 # For some reason setting repo.lfstatus in hgwebarchive only changes the
932 932 # unfiltered repo's attr, so check that as well.
933 933 if not repo.lfstatus and not repo.unfiltered().lfstatus:
934 934 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
935 935 subrepos)
936 936
937 937 # No need to lock because we are only reading history and
938 938 # largefile caches, neither of which are modified.
939 939 if node is not None:
940 940 lfcommands.cachelfiles(repo.ui, repo, node)
941 941
942 942 if kind not in archival.archivers:
943 943 raise error.Abort(_("unknown archive type '%s'") % kind)
944 944
945 945 ctx = repo[node]
946 946
947 947 if kind == 'files':
948 948 if prefix:
949 949 raise error.Abort(
950 950 _('cannot give prefix when archiving to files'))
951 951 else:
952 952 prefix = archival.tidyprefix(dest, kind, prefix)
953 953
954 954 def write(name, mode, islink, getdata):
955 955 if matchfn and not matchfn(name):
956 956 return
957 957 data = getdata()
958 958 if decode:
959 959 data = repo.wwritedata(name, data)
960 960 archiver.addfile(prefix + name, mode, islink, data)
961 961
962 962 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
963 963
964 964 if repo.ui.configbool("ui", "archivemeta", True):
965 965 write('.hg_archival.txt', 0o644, False,
966 966 lambda: archival.buildmetadata(ctx))
967 967
968 968 for f in ctx:
969 969 ff = ctx.flags(f)
970 970 getdata = ctx[f].data
971 971 if lfutil.isstandin(f):
972 972 if node is not None:
973 973 path = lfutil.findfile(repo, getdata().strip())
974 974
975 975 if path is None:
976 976 raise error.Abort(
977 977 _('largefile %s not found in repo store or system cache')
978 978 % lfutil.splitstandin(f))
979 979 else:
980 980 path = lfutil.splitstandin(f)
981 981
982 982 f = lfutil.splitstandin(f)
983 983
984 984 getdata = lambda: util.readfile(path)
985 985 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
986 986
987 987 if subrepos:
988 988 for subpath in sorted(ctx.substate):
989 989 sub = ctx.workingsub(subpath)
990 990 submatch = matchmod.subdirmatcher(subpath, matchfn)
991 991 sub._repo.lfstatus = True
992 992 sub.archive(archiver, prefix, submatch)
993 993
994 994 archiver.done()
995 995
996 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
996 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
997 997 if not repo._repo.lfstatus:
998 return orig(repo, archiver, prefix, match)
998 return orig(repo, archiver, prefix, match, decode)
999 999
1000 1000 repo._get(repo._state + ('hg',))
1001 1001 rev = repo._state[1]
1002 1002 ctx = repo._repo[rev]
1003 1003
1004 1004 if ctx.node() is not None:
1005 1005 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1006 1006
1007 1007 def write(name, mode, islink, getdata):
1008 1008 # At this point, the standin has been replaced with the largefile name,
1009 1009 # so the normal matcher works here without the lfutil variants.
1010 1010 if match and not match(f):
1011 1011 return
1012 1012 data = getdata()
1013 if decode:
1014 data = repo._repo.wwritedata(name, data)
1013 1015
1014 1016 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1015 1017
1016 1018 for f in ctx:
1017 1019 ff = ctx.flags(f)
1018 1020 getdata = ctx[f].data
1019 1021 if lfutil.isstandin(f):
1020 1022 if ctx.node() is not None:
1021 1023 path = lfutil.findfile(repo._repo, getdata().strip())
1022 1024
1023 1025 if path is None:
1024 1026 raise error.Abort(
1025 1027 _('largefile %s not found in repo store or system cache')
1026 1028 % lfutil.splitstandin(f))
1027 1029 else:
1028 1030 path = lfutil.splitstandin(f)
1029 1031
1030 1032 f = lfutil.splitstandin(f)
1031 1033
1032 1034 getdata = lambda: util.readfile(os.path.join(prefix, path))
1033 1035
1034 1036 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1035 1037
1036 1038 for subpath in sorted(ctx.substate):
1037 1039 sub = ctx.workingsub(subpath)
1038 1040 submatch = matchmod.subdirmatcher(subpath, match)
1039 1041 sub._repo.lfstatus = True
1040 sub.archive(archiver, prefix + repo._path + '/', submatch)
1042 sub.archive(archiver, prefix + repo._path + '/', submatch, decode)
1041 1043
1042 1044 # If a largefile is modified, the change is not reflected in its
1043 1045 # standin until a commit. cmdutil.bailifchanged() raises an exception
1044 1046 # if the repo has uncommitted changes. Wrap it to also check if
1045 1047 # largefiles were changed. This is used by bisect, backout and fetch.
1046 1048 def overridebailifchanged(orig, repo, *args, **kwargs):
1047 1049 orig(repo, *args, **kwargs)
1048 1050 repo.lfstatus = True
1049 1051 s = repo.status()
1050 1052 repo.lfstatus = False
1051 1053 if s.modified or s.added or s.removed or s.deleted:
1052 1054 raise error.Abort(_('uncommitted changes'))
1053 1055
1054 1056 def postcommitstatus(orig, repo, *args, **kwargs):
1055 1057 repo.lfstatus = True
1056 1058 try:
1057 1059 return orig(repo, *args, **kwargs)
1058 1060 finally:
1059 1061 repo.lfstatus = False
1060 1062
1061 1063 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1062 1064 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1063 1065 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1064 1066 m = composelargefilematcher(match, repo[None].manifest())
1065 1067
1066 1068 try:
1067 1069 repo.lfstatus = True
1068 1070 s = repo.status(match=m, clean=True)
1069 1071 finally:
1070 1072 repo.lfstatus = False
1071 1073 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1072 1074 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1073 1075
1074 1076 for f in forget:
1075 1077 if lfutil.standin(f) not in repo.dirstate and not \
1076 1078 repo.wvfs.isdir(lfutil.standin(f)):
1077 1079 ui.warn(_('not removing %s: file is already untracked\n')
1078 1080 % m.rel(f))
1079 1081 bad.append(f)
1080 1082
1081 1083 for f in forget:
1082 1084 if ui.verbose or not m.exact(f):
1083 1085 ui.status(_('removing %s\n') % m.rel(f))
1084 1086
1085 1087 # Need to lock because standin files are deleted then removed from the
1086 1088 # repository and we could race in-between.
1087 1089 with repo.wlock():
1088 1090 lfdirstate = lfutil.openlfdirstate(ui, repo)
1089 1091 for f in forget:
1090 1092 if lfdirstate[f] == 'a':
1091 1093 lfdirstate.drop(f)
1092 1094 else:
1093 1095 lfdirstate.remove(f)
1094 1096 lfdirstate.write()
1095 1097 standins = [lfutil.standin(f) for f in forget]
1096 1098 for f in standins:
1097 1099 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1098 1100 rejected = repo[None].forget(standins)
1099 1101
1100 1102 bad.extend(f for f in rejected if f in m.files())
1101 1103 forgot.extend(f for f in forget if f not in rejected)
1102 1104 return bad, forgot
1103 1105
1104 1106 def _getoutgoings(repo, other, missing, addfunc):
1105 1107 """get pairs of filename and largefile hash in outgoing revisions
1106 1108 in 'missing'.
1107 1109
1108 1110 largefiles already existing on 'other' repository are ignored.
1109 1111
1110 1112 'addfunc' is invoked with each unique pairs of filename and
1111 1113 largefile hash value.
1112 1114 """
1113 1115 knowns = set()
1114 1116 lfhashes = set()
1115 1117 def dedup(fn, lfhash):
1116 1118 k = (fn, lfhash)
1117 1119 if k not in knowns:
1118 1120 knowns.add(k)
1119 1121 lfhashes.add(lfhash)
1120 1122 lfutil.getlfilestoupload(repo, missing, dedup)
1121 1123 if lfhashes:
1122 1124 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1123 1125 for fn, lfhash in knowns:
1124 1126 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1125 1127 addfunc(fn, lfhash)
1126 1128
1127 1129 def outgoinghook(ui, repo, other, opts, missing):
1128 1130 if opts.pop('large', None):
1129 1131 lfhashes = set()
1130 1132 if ui.debugflag:
1131 1133 toupload = {}
1132 1134 def addfunc(fn, lfhash):
1133 1135 if fn not in toupload:
1134 1136 toupload[fn] = []
1135 1137 toupload[fn].append(lfhash)
1136 1138 lfhashes.add(lfhash)
1137 1139 def showhashes(fn):
1138 1140 for lfhash in sorted(toupload[fn]):
1139 1141 ui.debug(' %s\n' % (lfhash))
1140 1142 else:
1141 1143 toupload = set()
1142 1144 def addfunc(fn, lfhash):
1143 1145 toupload.add(fn)
1144 1146 lfhashes.add(lfhash)
1145 1147 def showhashes(fn):
1146 1148 pass
1147 1149 _getoutgoings(repo, other, missing, addfunc)
1148 1150
1149 1151 if not toupload:
1150 1152 ui.status(_('largefiles: no files to upload\n'))
1151 1153 else:
1152 1154 ui.status(_('largefiles to upload (%d entities):\n')
1153 1155 % (len(lfhashes)))
1154 1156 for file in sorted(toupload):
1155 1157 ui.status(lfutil.splitstandin(file) + '\n')
1156 1158 showhashes(file)
1157 1159 ui.status('\n')
1158 1160
1159 1161 def summaryremotehook(ui, repo, opts, changes):
1160 1162 largeopt = opts.get('large', False)
1161 1163 if changes is None:
1162 1164 if largeopt:
1163 1165 return (False, True) # only outgoing check is needed
1164 1166 else:
1165 1167 return (False, False)
1166 1168 elif largeopt:
1167 1169 url, branch, peer, outgoing = changes[1]
1168 1170 if peer is None:
1169 1171 # i18n: column positioning for "hg summary"
1170 1172 ui.status(_('largefiles: (no remote repo)\n'))
1171 1173 return
1172 1174
1173 1175 toupload = set()
1174 1176 lfhashes = set()
1175 1177 def addfunc(fn, lfhash):
1176 1178 toupload.add(fn)
1177 1179 lfhashes.add(lfhash)
1178 1180 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1179 1181
1180 1182 if not toupload:
1181 1183 # i18n: column positioning for "hg summary"
1182 1184 ui.status(_('largefiles: (no files to upload)\n'))
1183 1185 else:
1184 1186 # i18n: column positioning for "hg summary"
1185 1187 ui.status(_('largefiles: %d entities for %d files to upload\n')
1186 1188 % (len(lfhashes), len(toupload)))
1187 1189
1188 1190 def overridesummary(orig, ui, repo, *pats, **opts):
1189 1191 try:
1190 1192 repo.lfstatus = True
1191 1193 orig(ui, repo, *pats, **opts)
1192 1194 finally:
1193 1195 repo.lfstatus = False
1194 1196
1195 1197 def scmutiladdremove(orig, repo, matcher, prefix, opts=None, dry_run=None,
1196 1198 similarity=None):
1197 1199 if opts is None:
1198 1200 opts = {}
1199 1201 if not lfutil.islfilesrepo(repo):
1200 1202 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1201 1203 # Get the list of missing largefiles so we can remove them
1202 1204 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1203 1205 unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()), [],
1204 1206 False, False, False)
1205 1207
1206 1208 # Call into the normal remove code, but the removing of the standin, we want
1207 1209 # to have handled by original addremove. Monkey patching here makes sure
1208 1210 # we don't remove the standin in the largefiles code, preventing a very
1209 1211 # confused state later.
1210 1212 if s.deleted:
1211 1213 m = copy.copy(matcher)
1212 1214
1213 1215 # The m._files and m._map attributes are not changed to the deleted list
1214 1216 # because that affects the m.exact() test, which in turn governs whether
1215 1217 # or not the file name is printed, and how. Simply limit the original
1216 1218 # matches to those in the deleted status list.
1217 1219 matchfn = m.matchfn
1218 1220 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1219 1221
1220 1222 removelargefiles(repo.ui, repo, True, m, **opts)
1221 1223 # Call into the normal add code, and any files that *should* be added as
1222 1224 # largefiles will be
1223 1225 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1224 1226 # Now that we've handled largefiles, hand off to the original addremove
1225 1227 # function to take care of the rest. Make sure it doesn't do anything with
1226 1228 # largefiles by passing a matcher that will ignore them.
1227 1229 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1228 1230 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1229 1231
1230 1232 # Calling purge with --all will cause the largefiles to be deleted.
1231 1233 # Override repo.status to prevent this from happening.
1232 1234 def overridepurge(orig, ui, repo, *dirs, **opts):
1233 1235 # XXX Monkey patching a repoview will not work. The assigned attribute will
1234 1236 # be set on the unfiltered repo, but we will only lookup attributes in the
1235 1237 # unfiltered repo if the lookup in the repoview object itself fails. As the
1236 1238 # monkey patched method exists on the repoview class the lookup will not
1237 1239 # fail. As a result, the original version will shadow the monkey patched
1238 1240 # one, defeating the monkey patch.
1239 1241 #
1240 1242 # As a work around we use an unfiltered repo here. We should do something
1241 1243 # cleaner instead.
1242 1244 repo = repo.unfiltered()
1243 1245 oldstatus = repo.status
1244 1246 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1245 1247 clean=False, unknown=False, listsubrepos=False):
1246 1248 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1247 1249 listsubrepos)
1248 1250 lfdirstate = lfutil.openlfdirstate(ui, repo)
1249 1251 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1250 1252 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1251 1253 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1252 1254 unknown, ignored, r.clean)
1253 1255 repo.status = overridestatus
1254 1256 orig(ui, repo, *dirs, **opts)
1255 1257 repo.status = oldstatus
1256 1258 def overriderollback(orig, ui, repo, **opts):
1257 1259 with repo.wlock():
1258 1260 before = repo.dirstate.parents()
1259 1261 orphans = set(f for f in repo.dirstate
1260 1262 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1261 1263 result = orig(ui, repo, **opts)
1262 1264 after = repo.dirstate.parents()
1263 1265 if before == after:
1264 1266 return result # no need to restore standins
1265 1267
1266 1268 pctx = repo['.']
1267 1269 for f in repo.dirstate:
1268 1270 if lfutil.isstandin(f):
1269 1271 orphans.discard(f)
1270 1272 if repo.dirstate[f] == 'r':
1271 1273 repo.wvfs.unlinkpath(f, ignoremissing=True)
1272 1274 elif f in pctx:
1273 1275 fctx = pctx[f]
1274 1276 repo.wwrite(f, fctx.data(), fctx.flags())
1275 1277 else:
1276 1278 # content of standin is not so important in 'a',
1277 1279 # 'm' or 'n' (coming from the 2nd parent) cases
1278 1280 lfutil.writestandin(repo, f, '', False)
1279 1281 for standin in orphans:
1280 1282 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1281 1283
1282 1284 lfdirstate = lfutil.openlfdirstate(ui, repo)
1283 1285 orphans = set(lfdirstate)
1284 1286 lfiles = lfutil.listlfiles(repo)
1285 1287 for file in lfiles:
1286 1288 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1287 1289 orphans.discard(file)
1288 1290 for lfile in orphans:
1289 1291 lfdirstate.drop(lfile)
1290 1292 lfdirstate.write()
1291 1293 return result
1292 1294
1293 1295 def overridetransplant(orig, ui, repo, *revs, **opts):
1294 1296 resuming = opts.get('continue')
1295 1297 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1296 1298 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1297 1299 try:
1298 1300 result = orig(ui, repo, *revs, **opts)
1299 1301 finally:
1300 1302 repo._lfstatuswriters.pop()
1301 1303 repo._lfcommithooks.pop()
1302 1304 return result
1303 1305
1304 1306 def overridecat(orig, ui, repo, file1, *pats, **opts):
1305 1307 ctx = scmutil.revsingle(repo, opts.get('rev'))
1306 1308 err = 1
1307 1309 notbad = set()
1308 1310 m = scmutil.match(ctx, (file1,) + pats, opts)
1309 1311 origmatchfn = m.matchfn
1310 1312 def lfmatchfn(f):
1311 1313 if origmatchfn(f):
1312 1314 return True
1313 1315 lf = lfutil.splitstandin(f)
1314 1316 if lf is None:
1315 1317 return False
1316 1318 notbad.add(lf)
1317 1319 return origmatchfn(lf)
1318 1320 m.matchfn = lfmatchfn
1319 1321 origbadfn = m.bad
1320 1322 def lfbadfn(f, msg):
1321 1323 if not f in notbad:
1322 1324 origbadfn(f, msg)
1323 1325 m.bad = lfbadfn
1324 1326
1325 1327 origvisitdirfn = m.visitdir
1326 1328 def lfvisitdirfn(dir):
1327 1329 if dir == lfutil.shortname:
1328 1330 return True
1329 1331 ret = origvisitdirfn(dir)
1330 1332 if ret:
1331 1333 return ret
1332 1334 lf = lfutil.splitstandin(dir)
1333 1335 if lf is None:
1334 1336 return False
1335 1337 return origvisitdirfn(lf)
1336 1338 m.visitdir = lfvisitdirfn
1337 1339
1338 1340 for f in ctx.walk(m):
1339 1341 with cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1340 1342 pathname=f) as fp:
1341 1343 lf = lfutil.splitstandin(f)
1342 1344 if lf is None or origmatchfn(f):
1343 1345 # duplicating unreachable code from commands.cat
1344 1346 data = ctx[f].data()
1345 1347 if opts.get('decode'):
1346 1348 data = repo.wwritedata(f, data)
1347 1349 fp.write(data)
1348 1350 else:
1349 1351 hash = lfutil.readstandin(repo, lf, ctx.rev())
1350 1352 if not lfutil.inusercache(repo.ui, hash):
1351 1353 store = storefactory.openstore(repo)
1352 1354 success, missing = store.get([(lf, hash)])
1353 1355 if len(success) != 1:
1354 1356 raise error.Abort(
1355 1357 _('largefile %s is not in cache and could not be '
1356 1358 'downloaded') % lf)
1357 1359 path = lfutil.usercachepath(repo.ui, hash)
1358 1360 with open(path, "rb") as fpin:
1359 1361 for chunk in util.filechunkiter(fpin):
1360 1362 fp.write(chunk)
1361 1363 err = 0
1362 1364 return err
1363 1365
1364 1366 def mergeupdate(orig, repo, node, branchmerge, force,
1365 1367 *args, **kwargs):
1366 1368 matcher = kwargs.get('matcher', None)
1367 1369 # note if this is a partial update
1368 1370 partial = matcher and not matcher.always()
1369 1371 with repo.wlock():
1370 1372 # branch | | |
1371 1373 # merge | force | partial | action
1372 1374 # -------+-------+---------+--------------
1373 1375 # x | x | x | linear-merge
1374 1376 # o | x | x | branch-merge
1375 1377 # x | o | x | overwrite (as clean update)
1376 1378 # o | o | x | force-branch-merge (*1)
1377 1379 # x | x | o | (*)
1378 1380 # o | x | o | (*)
1379 1381 # x | o | o | overwrite (as revert)
1380 1382 # o | o | o | (*)
1381 1383 #
1382 1384 # (*) don't care
1383 1385 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1384 1386
1385 1387 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1386 1388 unsure, s = lfdirstate.status(matchmod.always(repo.root,
1387 1389 repo.getcwd()),
1388 1390 [], False, True, False)
1389 1391 oldclean = set(s.clean)
1390 1392 pctx = repo['.']
1391 1393 for lfile in unsure + s.modified:
1392 1394 lfileabs = repo.wvfs.join(lfile)
1393 1395 if not repo.wvfs.exists(lfileabs):
1394 1396 continue
1395 1397 lfhash = lfutil.hashrepofile(repo, lfile)
1396 1398 standin = lfutil.standin(lfile)
1397 1399 lfutil.writestandin(repo, standin, lfhash,
1398 1400 lfutil.getexecutable(lfileabs))
1399 1401 if (standin in pctx and
1400 1402 lfhash == lfutil.readstandin(repo, lfile, '.')):
1401 1403 oldclean.add(lfile)
1402 1404 for lfile in s.added:
1403 1405 lfutil.updatestandin(repo, lfutil.standin(lfile))
1404 1406 # mark all clean largefiles as dirty, just in case the update gets
1405 1407 # interrupted before largefiles and lfdirstate are synchronized
1406 1408 for lfile in oldclean:
1407 1409 lfdirstate.normallookup(lfile)
1408 1410 lfdirstate.write()
1409 1411
1410 1412 oldstandins = lfutil.getstandinsstate(repo)
1411 1413
1412 1414 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1413 1415
1414 1416 newstandins = lfutil.getstandinsstate(repo)
1415 1417 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1416 1418
1417 1419 # to avoid leaving all largefiles as dirty and thus rehash them, mark
1418 1420 # all the ones that didn't change as clean
1419 1421 for lfile in oldclean.difference(filelist):
1420 1422 lfdirstate.normal(lfile)
1421 1423 lfdirstate.write()
1422 1424
1423 1425 if branchmerge or force or partial:
1424 1426 filelist.extend(s.deleted + s.removed)
1425 1427
1426 1428 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1427 1429 normallookup=partial)
1428 1430
1429 1431 return result
1430 1432
1431 1433 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1432 1434 result = orig(repo, files, *args, **kwargs)
1433 1435
1434 1436 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1435 1437 if filelist:
1436 1438 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1437 1439 printmessage=False, normallookup=True)
1438 1440
1439 1441 return result
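
A side note on the structure of the file above: every wrapper takes the wrapped original as its first argument (`orig`/`origfn`) and delegates to it after adjusting arguments or repository state; largefiles installs these wrappers at extension setup time via Mercurial's `extensions.wrapfunction`/`wrapcommand`. The snippet below is a bare-bones illustration of that pattern, using a local stand-in rather than Mercurial's extension machinery.

# Illustrative only: the 'orig-first' wrapper pattern used throughout the
# overrides module, with a simplified stand-in for extensions.wrapfunction.

def archive(repo, dest, decode=True):
    return 'archived %s -> %s (decode=%s)' % (repo, dest, decode)

def overridearchive(orig, repo, dest, decode=True):
    # adjust state or arguments, then delegate to the wrapped original
    print('largefiles: caching largefiles before archiving %s' % repo)
    return orig(repo, dest, decode)

def wrapfunction(namespace, name, wrapper):
    # simplified model: remember the original and install a wrapper that
    # receives it as the first argument, like extensions.wrapfunction does
    origfn = namespace[name]
    namespace[name] = lambda *args, **kwargs: wrapper(origfn, *args, **kwargs)

wrapfunction(globals(), 'archive', overridearchive)
print(archive('repo1', 'snapshot.tgz'))  # wrapper runs, then the original
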
mercurial/archival.py
@@ -1,340 +1,340 @@
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import gzip
11 11 import os
12 12 import struct
13 13 import tarfile
14 14 import time
15 15 import zipfile
16 16 import zlib
17 17
18 18 from .i18n import _
19 19
20 20 from . import (
21 21 cmdutil,
22 22 encoding,
23 23 error,
24 24 match as matchmod,
25 25 scmutil,
26 26 util,
27 27 )
28 28 stringio = util.stringio
29 29
30 30 # from unzip source code:
31 31 _UNX_IFREG = 0x8000
32 32 _UNX_IFLNK = 0xa000
33 33
34 34 def tidyprefix(dest, kind, prefix):
35 35 '''choose prefix to use for names in archive. make sure prefix is
36 36 safe for consumers.'''
37 37
38 38 if prefix:
39 39 prefix = util.normpath(prefix)
40 40 else:
41 41 if not isinstance(dest, str):
42 42 raise ValueError('dest must be string if no prefix')
43 43 prefix = os.path.basename(dest)
44 44 lower = prefix.lower()
45 45 for sfx in exts.get(kind, []):
46 46 if lower.endswith(sfx):
47 47 prefix = prefix[:-len(sfx)]
48 48 break
49 49 lpfx = os.path.normpath(util.localpath(prefix))
50 50 prefix = util.pconvert(lpfx)
51 51 if not prefix.endswith('/'):
52 52 prefix += '/'
53 53 # Drop the leading '.' path component if present, so Windows can read the
54 54 # zip files (issue4634)
55 55 if prefix.startswith('./'):
56 56 prefix = prefix[2:]
57 57 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
58 58 raise error.Abort(_('archive prefix contains illegal components'))
59 59 return prefix
60 60
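A few illustrative calls (file and directory names are made up; results assume the default exts table below):

    tidyprefix('proj-1.0.tar.gz', 'tgz', '')   # -> 'proj-1.0/' (archive suffix stripped)
    tidyprefix(None, 'zip', './nested/dir')    # -> 'nested/dir/' (leading './' dropped)
    tidyprefix(None, 'zip', '../escape')       # raises error.Abort (illegal components)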
61 61 exts = {
62 62 'tar': ['.tar'],
63 63 'tbz2': ['.tbz2', '.tar.bz2'],
64 64 'tgz': ['.tgz', '.tar.gz'],
65 65 'zip': ['.zip'],
66 66 }
67 67
68 68 def guesskind(dest):
69 69 for kind, extensions in exts.iteritems():
70 70 if any(dest.endswith(ext) for ext in extensions):
71 71 return kind
72 72 return None
73 73
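Against the exts table above, guesskind() resolves hypothetical destinations like so:

    guesskind('project-1.0.tar.bz2')   # -> 'tbz2'
    guesskind('project-1.0.zip')       # -> 'zip'
    guesskind('project-1.0')           # -> None, so the caller must pick a kind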
74 74 def _rootctx(repo):
75 75 # repo[0] may be hidden
76 76 for rev in repo:
77 77 return repo[rev]
78 78 return repo['null']
79 79
80 80 def buildmetadata(ctx):
81 81 '''build content of .hg_archival.txt'''
82 82 repo = ctx.repo()
83 83 hex = ctx.hex()
84 84 if ctx.rev() is None:
85 85 hex = ctx.p1().hex()
86 86 if ctx.dirty():
87 87 hex += '+'
88 88
89 89 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
90 90 _rootctx(repo).hex(), hex, encoding.fromlocal(ctx.branch()))
91 91
92 92 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
93 93 if repo.tagtype(t) == 'global')
94 94 if not tags:
95 95 repo.ui.pushbuffer()
96 96 opts = {'template': '{latesttag}\n{latesttagdistance}\n'
97 97 '{changessincelatesttag}',
98 98 'style': '', 'patch': None, 'git': None}
99 99 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
100 100 ltags, dist, changessince = repo.ui.popbuffer().split('\n')
101 101 ltags = ltags.split(':')
102 102 tags = ''.join('latesttag: %s\n' % t for t in ltags)
103 103 tags += 'latesttagdistance: %s\n' % dist
104 104 tags += 'changessincelatesttag: %s\n' % changessince
105 105
106 106 return base + tags
107 107
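For reference, the metadata built above looks roughly like this when the changeset carries no global tags (hashes and counts are placeholders):

    repo: <40-hex node of the repository root revision>
    node: <40-hex node of the archived changeset, '+' appended if dirty>
    branch: default
    latesttag: <latest tag, or 'null' if none>
    latesttagdistance: <revisions since that tag>
    changessincelatesttag: <changesets since that tag>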
108 108 class tarit(object):
109 109 '''write archive to tar file or stream. can write uncompressed,
110 110 or compressed with gzip or bzip2.'''
111 111
112 112 class GzipFileWithTime(gzip.GzipFile):
113 113
114 114 def __init__(self, *args, **kw):
115 115 timestamp = None
116 116 if 'timestamp' in kw:
117 117 timestamp = kw.pop('timestamp')
118 118 if timestamp is None:
119 119 self.timestamp = time.time()
120 120 else:
121 121 self.timestamp = timestamp
122 122 gzip.GzipFile.__init__(self, *args, **kw)
123 123
124 124 def _write_gzip_header(self):
125 125 self.fileobj.write('\037\213') # magic header
126 126 self.fileobj.write('\010') # compression method
127 127 fname = self.name
128 128 if fname and fname.endswith('.gz'):
129 129 fname = fname[:-3]
130 130 flags = 0
131 131 if fname:
132 132 flags = gzip.FNAME
133 133 self.fileobj.write(chr(flags))
134 134 gzip.write32u(self.fileobj, long(self.timestamp))
135 135 self.fileobj.write('\002')
136 136 self.fileobj.write('\377')
137 137 if fname:
138 138 self.fileobj.write(fname + '\000')
139 139
140 140 def __init__(self, dest, mtime, kind=''):
141 141 self.mtime = mtime
142 142 self.fileobj = None
143 143
144 144 def taropen(mode, name='', fileobj=None):
145 145 if kind == 'gz':
146 146 mode = mode[0]
147 147 if not fileobj:
148 148 fileobj = open(name, mode + 'b')
149 149 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
150 150 zlib.Z_BEST_COMPRESSION,
151 151 fileobj, timestamp=mtime)
152 152 self.fileobj = gzfileobj
153 153 return tarfile.TarFile.taropen(name, mode, gzfileobj)
154 154 else:
155 155 return tarfile.open(name, mode + kind, fileobj)
156 156
157 157 if isinstance(dest, str):
158 158 self.z = taropen('w:', name=dest)
159 159 else:
160 160 self.z = taropen('w|', fileobj=dest)
161 161
162 162 def addfile(self, name, mode, islink, data):
163 163 i = tarfile.TarInfo(name)
164 164 i.mtime = self.mtime
165 165 i.size = len(data)
166 166 if islink:
167 167 i.type = tarfile.SYMTYPE
168 168 i.mode = 0o777
169 169 i.linkname = data
170 170 data = None
171 171 i.size = 0
172 172 else:
173 173 i.mode = mode
174 174 data = stringio(data)
175 175 self.z.addfile(i, data)
176 176
177 177 def done(self):
178 178 self.z.close()
179 179 if self.fileobj:
180 180 self.fileobj.close()
181 181
182 182 class tellable(object):
183 183 '''provide tell method for zipfile.ZipFile when writing to http
184 184 response file object.'''
185 185
186 186 def __init__(self, fp):
187 187 self.fp = fp
188 188 self.offset = 0
189 189
190 190 def __getattr__(self, key):
191 191 return getattr(self.fp, key)
192 192
193 193 def write(self, s):
194 194 self.fp.write(s)
195 195 self.offset += len(s)
196 196
197 197 def tell(self):
198 198 return self.offset
199 199
200 200 class zipit(object):
201 201 '''write archive to zip file or stream. can write uncompressed,
202 202 or compressed with deflate.'''
203 203
204 204 def __init__(self, dest, mtime, compress=True):
205 205 if not isinstance(dest, str):
206 206 try:
207 207 dest.tell()
208 208 except (AttributeError, IOError):
209 209 dest = tellable(dest)
210 210 self.z = zipfile.ZipFile(dest, 'w',
211 211 compress and zipfile.ZIP_DEFLATED or
212 212 zipfile.ZIP_STORED)
213 213
214 214 # Python's zipfile module emits deprecation warnings if we try
215 215 # to store files with a date before 1980.
216 216 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
217 217 if mtime < epoch:
218 218 mtime = epoch
219 219
220 220 self.mtime = mtime
221 221 self.date_time = time.gmtime(mtime)[:6]
222 222
223 223 def addfile(self, name, mode, islink, data):
224 224 i = zipfile.ZipInfo(name, self.date_time)
225 225 i.compress_type = self.z.compression
226 226 # unzip will not honor unix file modes unless file creator is
227 227 # set to unix (id 3).
228 228 i.create_system = 3
229 229 ftype = _UNX_IFREG
230 230 if islink:
231 231 mode = 0o777
232 232 ftype = _UNX_IFLNK
233 233 i.external_attr = (mode | ftype) << 16
234 234 # add "extended-timestamp" extra block, because zip archives
235 235 # without this will be extracted with an unexpected timestamp,
236 236 # if TZ is not configured as GMT
237 237 i.extra += struct.pack('<hhBl',
238 238 0x5455, # block type: "extended-timestamp"
239 239 1 + 4, # size of this block
240 240 1, # "modification time is present"
241 241 int(self.mtime)) # last modification (UTC)
242 242 self.z.writestr(i, data)
243 243
244 244 def done(self):
245 245 self.z.close()
246 246
247 247 class fileit(object):
248 248 '''write archive as files in directory.'''
249 249
250 250 def __init__(self, name, mtime):
251 251 self.basedir = name
252 252 self.opener = scmutil.opener(self.basedir)
253 253
254 254 def addfile(self, name, mode, islink, data):
255 255 if islink:
256 256 self.opener.symlink(data, name)
257 257 return
258 258 f = self.opener(name, "w", atomictemp=True)
259 259 f.write(data)
260 260 f.close()
261 261 destfile = os.path.join(self.basedir, name)
262 262 os.chmod(destfile, mode)
263 263
264 264 def done(self):
265 265 pass
266 266
267 267 archivers = {
268 268 'files': fileit,
269 269 'tar': tarit,
270 270 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
271 271 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
272 272 'uzip': lambda name, mtime: zipit(name, mtime, False),
273 273 'zip': zipit,
274 274 }
275 275
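Every entry in the table above follows the same small duck-typed interface: a constructor taking (dest, mtime), addfile(name, mode, islink, data) and done(). A minimal, hypothetical archiver that merely records member names would therefore look like:

    class listit(object):
        '''collect archive member names without writing anything (example only)'''
        def __init__(self, dest, mtime):
            self.names = []
        def addfile(self, name, mode, islink, data):
            self.names.append(name)
        def done(self):
            pass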
276 276 def archive(repo, dest, node, kind, decode=True, matchfn=None,
277 277 prefix='', mtime=None, subrepos=False):
278 278 '''create archive of repo as it was at node.
279 279
280 280 dest can be name of directory, name of archive file, or file
281 281 object to write archive to.
282 282
283 283 kind is type of archive to create.
284 284
285 285 decode tells whether to put files through decode filters from
286 286 hgrc.
287 287
288 288 matchfn is function to filter names of files to write to archive.
289 289
290 290 prefix is name of path to put before every archive member.'''
291 291
292 292 if kind == 'files':
293 293 if prefix:
294 294 raise error.Abort(_('cannot give prefix when archiving to files'))
295 295 else:
296 296 prefix = tidyprefix(dest, kind, prefix)
297 297
298 298 def write(name, mode, islink, getdata):
299 299 data = getdata()
300 300 if decode:
301 301 data = repo.wwritedata(name, data)
302 302 archiver.addfile(prefix + name, mode, islink, data)
303 303
304 304 if kind not in archivers:
305 305 raise error.Abort(_("unknown archive type '%s'") % kind)
306 306
307 307 ctx = repo[node]
308 308 archiver = archivers[kind](dest, mtime or ctx.date()[0])
309 309
310 310 if repo.ui.configbool("ui", "archivemeta", True):
311 311 name = '.hg_archival.txt'
312 312 if not matchfn or matchfn(name):
313 313 write(name, 0o644, False, lambda: buildmetadata(ctx))
314 314
315 315 if matchfn:
316 316 files = [f for f in ctx.manifest().keys() if matchfn(f)]
317 317 else:
318 318 files = ctx.manifest().keys()
319 319 total = len(files)
320 320 if total:
321 321 files.sort()
322 322 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
323 323 for i, f in enumerate(files):
324 324 ff = ctx.flags(f)
325 325 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
326 326 repo.ui.progress(_('archiving'), i + 1, item=f,
327 327 unit=_('files'), total=total)
328 328 repo.ui.progress(_('archiving'), None)
329 329
330 330 if subrepos:
331 331 for subpath in sorted(ctx.substate):
332 332 sub = ctx.workingsub(subpath)
333 333 submatch = matchmod.subdirmatcher(subpath, matchfn)
334 total += sub.archive(archiver, prefix, submatch)
334 total += sub.archive(archiver, prefix, submatch, decode)
335 335
336 336 if total == 0:
337 337 raise error.Abort(_('no files match the archive pattern'))
338 338
339 339 archiver.done()
340 340 return total
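As a usage sketch (repository path and output name are hypothetical), the function above can be driven directly; decode=True runs the [decode] filters from hgrc over every member, and with this change the flag is now forwarded into subrepositories as well:

    from mercurial import archival, hg, ui as uimod

    u = uimod.ui()                    # or ui.ui.load() on later releases
    repo = hg.repository(u, '/path/to/repo')
    archival.archive(repo, 'snapshot.tgz', repo['tip'].node(), 'tgz',
                     decode=True, prefix='snapshot/', subrepos=True)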
@@ -1,1960 +1,1964 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import errno
12 12 import hashlib
13 13 import os
14 14 import posixpath
15 15 import re
16 16 import stat
17 17 import subprocess
18 18 import sys
19 19 import tarfile
20 20 import xml.dom.minidom
21 21
22 22
23 23 from .i18n import _
24 24 from . import (
25 25 cmdutil,
26 26 config,
27 27 encoding,
28 28 error,
29 29 exchange,
30 30 filemerge,
31 31 match as matchmod,
32 32 node,
33 33 pathutil,
34 34 phases,
35 35 pycompat,
36 36 scmutil,
37 37 util,
38 38 )
39 39
40 40 hg = None
41 41 propertycache = util.propertycache
42 42
43 43 nullstate = ('', '', 'empty')
44 44
45 45 def _expandedabspath(path):
46 46 '''
47 47 get a path or url and if it is a path expand it and return an absolute path
48 48 '''
49 49 expandedpath = util.urllocalpath(util.expandpath(path))
50 50 u = util.url(expandedpath)
51 51 if not u.scheme:
52 52 path = util.normpath(os.path.abspath(u.path))
53 53 return path
54 54
55 55 def _getstorehashcachename(remotepath):
56 56 '''get a unique filename for the store hash cache of a remote repository'''
57 57 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
58 58
59 59 class SubrepoAbort(error.Abort):
60 60 """Exception class used to avoid handling a subrepo error more than once"""
61 61 def __init__(self, *args, **kw):
62 62 self.subrepo = kw.pop('subrepo', None)
63 63 self.cause = kw.pop('cause', None)
64 64 error.Abort.__init__(self, *args, **kw)
65 65
66 66 def annotatesubrepoerror(func):
67 67 def decoratedmethod(self, *args, **kargs):
68 68 try:
69 69 res = func(self, *args, **kargs)
70 70 except SubrepoAbort as ex:
71 71 # This exception has already been handled
72 72 raise ex
73 73 except error.Abort as ex:
74 74 subrepo = subrelpath(self)
75 75 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
76 76 # avoid handling this exception by raising a SubrepoAbort exception
77 77 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
78 78 cause=sys.exc_info())
79 79 return res
80 80 return decoratedmethod
81 81
82 82 def state(ctx, ui):
83 83 """return a state dict, mapping subrepo paths configured in .hgsub
84 84 to tuple: (source from .hgsub, revision from .hgsubstate, kind
85 85 (key in types dict))
86 86 """
87 87 p = config.config()
88 88 repo = ctx.repo()
89 89 def read(f, sections=None, remap=None):
90 90 if f in ctx:
91 91 try:
92 92 data = ctx[f].data()
93 93 except IOError as err:
94 94 if err.errno != errno.ENOENT:
95 95 raise
96 96 # handle missing subrepo spec files as removed
97 97 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
98 98 repo.pathto(f))
99 99 return
100 100 p.parse(f, data, sections, remap, read)
101 101 else:
102 102 raise error.Abort(_("subrepo spec file \'%s\' not found") %
103 103 repo.pathto(f))
104 104 if '.hgsub' in ctx:
105 105 read('.hgsub')
106 106
107 107 for path, src in ui.configitems('subpaths'):
108 108 p.set('subpaths', path, src, ui.configsource('subpaths', path))
109 109
110 110 rev = {}
111 111 if '.hgsubstate' in ctx:
112 112 try:
113 113 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
114 114 l = l.lstrip()
115 115 if not l:
116 116 continue
117 117 try:
118 118 revision, path = l.split(" ", 1)
119 119 except ValueError:
120 120 raise error.Abort(_("invalid subrepository revision "
121 121 "specifier in \'%s\' line %d")
122 122 % (repo.pathto('.hgsubstate'), (i + 1)))
123 123 rev[path] = revision
124 124 except IOError as err:
125 125 if err.errno != errno.ENOENT:
126 126 raise
127 127
128 128 def remap(src):
129 129 for pattern, repl in p.items('subpaths'):
130 130 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
131 131 # does a string decode.
132 132 repl = repl.encode('string-escape')
133 133 # However, we still want to allow back references to go
134 134 # through unharmed, so we turn r'\\1' into r'\1'. Again,
135 135 # extra escapes are needed because re.sub string decodes.
136 136 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
137 137 try:
138 138 src = re.sub(pattern, repl, src, 1)
139 139 except re.error as e:
140 140 raise error.Abort(_("bad subrepository pattern in %s: %s")
141 141 % (p.source('subpaths', pattern), e))
142 142 return src
143 143
144 144 state = {}
145 145 for path, src in p[''].items():
146 146 kind = 'hg'
147 147 if src.startswith('['):
148 148 if ']' not in src:
149 149 raise error.Abort(_('missing ] in subrepo source'))
150 150 kind, src = src.split(']', 1)
151 151 kind = kind[1:]
152 152 src = src.lstrip() # strip any extra whitespace after ']'
153 153
154 154 if not util.url(src).isabs():
155 155 parent = _abssource(repo, abort=False)
156 156 if parent:
157 157 parent = util.url(parent)
158 158 parent.path = posixpath.join(parent.path or '', src)
159 159 parent.path = posixpath.normpath(parent.path)
160 160 joined = str(parent)
161 161 # Remap the full joined path and use it if it changes,
162 162 # else remap the original source.
163 163 remapped = remap(joined)
164 164 if remapped == joined:
165 165 src = remap(src)
166 166 else:
167 167 src = remapped
168 168
169 169 src = remap(src)
170 170 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
171 171
172 172 return state
173 173
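To make the parsing above concrete: a hypothetical .hgsub entry plus its companion .hgsubstate line (revision and URL are placeholders) end up in the returned dict roughly as follows:

    .hgsub:       nested = [git]git://example.com/nested.git
    .hgsubstate:  <40-hex revision> nested

    state(ctx, ui)['nested'] == ('git://example.com/nested.git',
                                 '<40-hex revision>', 'git')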
174 174 def writestate(repo, state):
175 175 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
176 176 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
177 177 if state[s][1] != nullstate[1]]
178 178 repo.wwrite('.hgsubstate', ''.join(lines), '')
179 179
180 180 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
181 181 """delegated from merge.applyupdates: merging of .hgsubstate file
182 182 in working context, merging context and ancestor context"""
183 183 if mctx == actx: # backwards?
184 184 actx = wctx.p1()
185 185 s1 = wctx.substate
186 186 s2 = mctx.substate
187 187 sa = actx.substate
188 188 sm = {}
189 189
190 190 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
191 191
192 192 def debug(s, msg, r=""):
193 193 if r:
194 194 r = "%s:%s:%s" % r
195 195 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
196 196
197 197 for s, l in sorted(s1.iteritems()):
198 198 a = sa.get(s, nullstate)
199 199 ld = l # local state with possible dirty flag for compares
200 200 if wctx.sub(s).dirty():
201 201 ld = (l[0], l[1] + "+")
202 202 if wctx == actx: # overwrite
203 203 a = ld
204 204
205 205 if s in s2:
206 206 prompts = filemerge.partextras(labels)
207 207 prompts['s'] = s
208 208 r = s2[s]
209 209 if ld == r or r == a: # no change or local is newer
210 210 sm[s] = l
211 211 continue
212 212 elif ld == a: # other side changed
213 213 debug(s, "other changed, get", r)
214 214 wctx.sub(s).get(r, overwrite)
215 215 sm[s] = r
216 216 elif ld[0] != r[0]: # sources differ
217 217 prompts['lo'] = l[0]
218 218 prompts['ro'] = r[0]
219 219 if repo.ui.promptchoice(
220 220 _(' subrepository sources for %(s)s differ\n'
221 221 'use (l)ocal%(l)s source (%(lo)s)'
222 222 ' or (r)emote%(o)s source (%(ro)s)?'
223 223 '$$ &Local $$ &Remote') % prompts, 0):
224 224 debug(s, "prompt changed, get", r)
225 225 wctx.sub(s).get(r, overwrite)
226 226 sm[s] = r
227 227 elif ld[1] == a[1]: # local side is unchanged
228 228 debug(s, "other side changed, get", r)
229 229 wctx.sub(s).get(r, overwrite)
230 230 sm[s] = r
231 231 else:
232 232 debug(s, "both sides changed")
233 233 srepo = wctx.sub(s)
234 234 prompts['sl'] = srepo.shortid(l[1])
235 235 prompts['sr'] = srepo.shortid(r[1])
236 236 option = repo.ui.promptchoice(
237 237 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
238 238 'remote revision: %(sr)s)\n'
239 239 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
240 240 '$$ &Merge $$ &Local $$ &Remote')
241 241 % prompts, 0)
242 242 if option == 0:
243 243 wctx.sub(s).merge(r)
244 244 sm[s] = l
245 245 debug(s, "merge with", r)
246 246 elif option == 1:
247 247 sm[s] = l
248 248 debug(s, "keep local subrepo revision", l)
249 249 else:
250 250 wctx.sub(s).get(r, overwrite)
251 251 sm[s] = r
252 252 debug(s, "get remote subrepo revision", r)
253 253 elif ld == a: # remote removed, local unchanged
254 254 debug(s, "remote removed, remove")
255 255 wctx.sub(s).remove()
256 256 elif a == nullstate: # not present in remote or ancestor
257 257 debug(s, "local added, keep")
258 258 sm[s] = l
259 259 continue
260 260 else:
261 261 if repo.ui.promptchoice(
262 262 _(' local%(l)s changed subrepository %(s)s'
263 263 ' which remote%(o)s removed\n'
264 264 'use (c)hanged version or (d)elete?'
265 265 '$$ &Changed $$ &Delete') % prompts, 0):
266 266 debug(s, "prompt remove")
267 267 wctx.sub(s).remove()
268 268
269 269 for s, r in sorted(s2.items()):
270 270 if s in s1:
271 271 continue
272 272 elif s not in sa:
273 273 debug(s, "remote added, get", r)
274 274 mctx.sub(s).get(r)
275 275 sm[s] = r
276 276 elif r != sa[s]:
277 277 if repo.ui.promptchoice(
278 278 _(' remote%(o)s changed subrepository %(s)s'
279 279 ' which local%(l)s removed\n'
280 280 'use (c)hanged version or (d)elete?'
281 281 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
282 282 debug(s, "prompt recreate", r)
283 283 mctx.sub(s).get(r)
284 284 sm[s] = r
285 285
286 286 # record merged .hgsubstate
287 287 writestate(repo, sm)
288 288 return sm
289 289
290 290 def _updateprompt(ui, sub, dirty, local, remote):
291 291 if dirty:
292 292 msg = (_(' subrepository sources for %s differ\n'
293 293 'use (l)ocal source (%s) or (r)emote source (%s)?'
294 294 '$$ &Local $$ &Remote')
295 295 % (subrelpath(sub), local, remote))
296 296 else:
297 297 msg = (_(' subrepository sources for %s differ (in checked out '
298 298 'version)\n'
299 299 'use (l)ocal source (%s) or (r)emote source (%s)?'
300 300 '$$ &Local $$ &Remote')
301 301 % (subrelpath(sub), local, remote))
302 302 return ui.promptchoice(msg, 0)
303 303
304 304 def reporelpath(repo):
305 305 """return path to this (sub)repo as seen from outermost repo"""
306 306 parent = repo
307 307 while util.safehasattr(parent, '_subparent'):
308 308 parent = parent._subparent
309 309 return repo.root[len(pathutil.normasprefix(parent.root)):]
310 310
311 311 def subrelpath(sub):
312 312 """return path to this subrepo as seen from outermost repo"""
313 313 return sub._relpath
314 314
315 315 def _abssource(repo, push=False, abort=True):
316 316 """return pull/push path of repo - either based on parent repo .hgsub info
317 317 or on the top repo config. Abort or return None if no source found."""
318 318 if util.safehasattr(repo, '_subparent'):
319 319 source = util.url(repo._subsource)
320 320 if source.isabs():
321 321 return str(source)
322 322 source.path = posixpath.normpath(source.path)
323 323 parent = _abssource(repo._subparent, push, abort=False)
324 324 if parent:
325 325 parent = util.url(util.pconvert(parent))
326 326 parent.path = posixpath.join(parent.path or '', source.path)
327 327 parent.path = posixpath.normpath(parent.path)
328 328 return str(parent)
329 329 else: # recursion reached top repo
330 330 if util.safehasattr(repo, '_subtoppath'):
331 331 return repo._subtoppath
332 332 if push and repo.ui.config('paths', 'default-push'):
333 333 return repo.ui.config('paths', 'default-push')
334 334 if repo.ui.config('paths', 'default'):
335 335 return repo.ui.config('paths', 'default')
336 336 if repo.shared():
337 337 # chop off the .hg component to get the default path form
338 338 return os.path.dirname(repo.sharedpath)
339 339 if abort:
340 340 raise error.Abort(_("default path for subrepository not found"))
341 341
342 342 def _sanitize(ui, vfs, ignore):
343 343 for dirname, dirs, names in vfs.walk():
344 344 for i, d in enumerate(dirs):
345 345 if d.lower() == ignore:
346 346 del dirs[i]
347 347 break
348 348 if vfs.basename(dirname).lower() != '.hg':
349 349 continue
350 350 for f in names:
351 351 if f.lower() == 'hgrc':
352 352 ui.warn(_("warning: removing potentially hostile 'hgrc' "
353 353 "in '%s'\n") % vfs.join(dirname))
354 354 vfs.unlink(vfs.reljoin(dirname, f))
355 355
356 356 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
357 357 """return instance of the right subrepo class for subrepo in path"""
358 358 # subrepo inherently violates our import layering rules
359 359 # because it wants to make repo objects from deep inside the stack
360 360 # so we manually delay the circular imports to not break
361 361 # scripts that don't use our demand-loading
362 362 global hg
363 363 from . import hg as h
364 364 hg = h
365 365
366 366 pathutil.pathauditor(ctx.repo().root)(path)
367 367 state = ctx.substate[path]
368 368 if state[2] not in types:
369 369 raise error.Abort(_('unknown subrepo type %s') % state[2])
370 370 if allowwdir:
371 371 state = (state[0], ctx.subrev(path), state[2])
372 372 return types[state[2]](ctx, path, state[:2], allowcreate)
373 373
374 374 def nullsubrepo(ctx, path, pctx):
375 375 """return an empty subrepo in pctx for the extant subrepo in ctx"""
376 376 # subrepo inherently violates our import layering rules
377 377 # because it wants to make repo objects from deep inside the stack
378 378 # so we manually delay the circular imports to not break
379 379 # scripts that don't use our demand-loading
380 380 global hg
381 381 from . import hg as h
382 382 hg = h
383 383
384 384 pathutil.pathauditor(ctx.repo().root)(path)
385 385 state = ctx.substate[path]
386 386 if state[2] not in types:
387 387 raise error.Abort(_('unknown subrepo type %s') % state[2])
388 388 subrev = ''
389 389 if state[2] == 'hg':
390 390 subrev = "0" * 40
391 391 return types[state[2]](pctx, path, (state[0], subrev), True)
392 392
393 393 def newcommitphase(ui, ctx):
394 394 commitphase = phases.newcommitphase(ui)
395 395 substate = getattr(ctx, "substate", None)
396 396 if not substate:
397 397 return commitphase
398 398 check = ui.config('phases', 'checksubrepos', 'follow')
399 399 if check not in ('ignore', 'follow', 'abort'):
400 400 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
401 401 % (check))
402 402 if check == 'ignore':
403 403 return commitphase
404 404 maxphase = phases.public
405 405 maxsub = None
406 406 for s in sorted(substate):
407 407 sub = ctx.sub(s)
408 408 subphase = sub.phase(substate[s][1])
409 409 if maxphase < subphase:
410 410 maxphase = subphase
411 411 maxsub = s
412 412 if commitphase < maxphase:
413 413 if check == 'abort':
414 414 raise error.Abort(_("can't commit in %s phase"
415 415 " conflicting %s from subrepository %s") %
416 416 (phases.phasenames[commitphase],
417 417 phases.phasenames[maxphase], maxsub))
418 418 ui.warn(_("warning: changes are committed in"
419 419 " %s phase from subrepository %s\n") %
420 420 (phases.phasenames[maxphase], maxsub))
421 421 return maxphase
422 422 return commitphase
423 423
424 424 # subrepo classes need to implement the following abstract class:
425 425
426 426 class abstractsubrepo(object):
427 427
428 428 def __init__(self, ctx, path):
429 429 """Initialize abstractsubrepo part
430 430
431 431 ``ctx`` is the context referring this subrepository in the
432 432 parent repository.
433 433
434 434 ``path`` is the path to this subrepository as seen from
435 435 innermost repository.
436 436 """
437 437 self.ui = ctx.repo().ui
438 438 self._ctx = ctx
439 439 self._path = path
440 440
441 441 def storeclean(self, path):
442 442 """
443 443 returns true if the repository has not changed since it was last
444 444 cloned from or pushed to a given repository.
445 445 """
446 446 return False
447 447
448 448 def dirty(self, ignoreupdate=False):
449 449 """returns true if the dirstate of the subrepo is dirty or does not
450 450 match current stored state. If ignoreupdate is true, only check
451 451 whether the subrepo has uncommitted changes in its dirstate.
452 452 """
453 453 raise NotImplementedError
454 454
455 455 def dirtyreason(self, ignoreupdate=False):
456 456 """return reason string if it is ``dirty()``
457 457
458 458 The returned string should have enough information for an
459 459 exception message.
460 460
461 461 Otherwise, this returns None.
462 462 """
463 463 if self.dirty(ignoreupdate=ignoreupdate):
464 464 return _("uncommitted changes in subrepository '%s'"
465 465 ) % subrelpath(self)
466 466
467 467 def bailifchanged(self, ignoreupdate=False, hint=None):
468 468 """raise Abort if subrepository is ``dirty()``
469 469 """
470 470 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
471 471 if dirtyreason:
472 472 raise error.Abort(dirtyreason, hint=hint)
473 473
474 474 def basestate(self):
475 475 """current working directory base state, disregarding .hgsubstate
476 476 state and working directory modifications"""
477 477 raise NotImplementedError
478 478
479 479 def checknested(self, path):
480 480 """check if path is a subrepository within this repository"""
481 481 return False
482 482
483 483 def commit(self, text, user, date):
484 484 """commit the current changes to the subrepo with the given
485 485 log message. Use given user and date if possible. Return the
486 486 new state of the subrepo.
487 487 """
488 488 raise NotImplementedError
489 489
490 490 def phase(self, state):
491 491 """returns phase of specified state in the subrepository.
492 492 """
493 493 return phases.public
494 494
495 495 def remove(self):
496 496 """remove the subrepo
497 497
498 498 (should verify the dirstate is not dirty first)
499 499 """
500 500 raise NotImplementedError
501 501
502 502 def get(self, state, overwrite=False):
503 503 """run whatever commands are needed to put the subrepo into
504 504 this state
505 505 """
506 506 raise NotImplementedError
507 507
508 508 def merge(self, state):
509 509 """merge currently-saved state with the new state."""
510 510 raise NotImplementedError
511 511
512 512 def push(self, opts):
513 513 """perform whatever action is analogous to 'hg push'
514 514
515 515 This may be a no-op on some systems.
516 516 """
517 517 raise NotImplementedError
518 518
519 519 def add(self, ui, match, prefix, explicitonly, **opts):
520 520 return []
521 521
522 522 def addremove(self, matcher, prefix, opts, dry_run, similarity):
523 523 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
524 524 return 1
525 525
526 526 def cat(self, match, prefix, **opts):
527 527 return 1
528 528
529 529 def status(self, rev2, **opts):
530 530 return scmutil.status([], [], [], [], [], [], [])
531 531
532 532 def diff(self, ui, diffopts, node2, match, prefix, **opts):
533 533 pass
534 534
535 535 def outgoing(self, ui, dest, opts):
536 536 return 1
537 537
538 538 def incoming(self, ui, source, opts):
539 539 return 1
540 540
541 541 def files(self):
542 542 """return filename iterator"""
543 543 raise NotImplementedError
544 544
545 def filedata(self, name):
546 """return file data"""
545 def filedata(self, name, decode):
546 """return file data, optionally passed through repo decoders"""
547 547 raise NotImplementedError
548 548
549 549 def fileflags(self, name):
550 550 """return file flags"""
551 551 return ''
552 552
553 553 def getfileset(self, expr):
554 554 """Resolve the fileset expression for this repo"""
555 555 return set()
556 556
557 557 def printfiles(self, ui, m, fm, fmt, subrepos):
558 558 """handle the files command for this subrepo"""
559 559 return 1
560 560
561 def archive(self, archiver, prefix, match=None):
561 def archive(self, archiver, prefix, match=None, decode=True):
562 562 if match is not None:
563 563 files = [f for f in self.files() if match(f)]
564 564 else:
565 565 files = self.files()
566 566 total = len(files)
567 567 relpath = subrelpath(self)
568 568 self.ui.progress(_('archiving (%s)') % relpath, 0,
569 569 unit=_('files'), total=total)
570 570 for i, name in enumerate(files):
571 571 flags = self.fileflags(name)
572 572 mode = 'x' in flags and 0o755 or 0o644
573 573 symlink = 'l' in flags
574 574 archiver.addfile(prefix + self._path + '/' + name,
575 mode, symlink, self.filedata(name))
575 mode, symlink, self.filedata(name, decode))
576 576 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
577 577 unit=_('files'), total=total)
578 578 self.ui.progress(_('archiving (%s)') % relpath, None)
579 579 return total
580 580
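Since archive() now forwards decode into filedata(), subrepo classes defined outside this module need the extra parameter as well. A hypothetical minimal implementation (the backing storage attributes are invented) can simply accept and ignore the flag when it has no decode filters, much as svnsubrepo does further down:

    class flatsubrepo(abstractsubrepo):       # illustration only
        def files(self):
            return list(self._names)          # invented storage
        def filedata(self, name, decode):
            # no repo decoders for this backend, so the flag is ignored
            return self._blobs[name]          # invented storage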
581 581 def walk(self, match):
582 582 '''
583 583 walk recursively through the directory tree, finding all files
584 584 matched by the match function
585 585 '''
586 586 pass
587 587
588 588 def forget(self, match, prefix):
589 589 return ([], [])
590 590
591 591 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
592 592 """remove the matched files from the subrepository and the filesystem,
593 593 possibly by force and/or after the file has been removed from the
594 594 filesystem. Return 0 on success, 1 on any warning.
595 595 """
596 596 warnings.append(_("warning: removefiles not implemented (%s)")
597 597 % self._path)
598 598 return 1
599 599
600 600 def revert(self, substate, *pats, **opts):
601 601 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
602 602 % (substate[0], substate[2]))
603 603 return []
604 604
605 605 def shortid(self, revid):
606 606 return revid
607 607
608 608 def verify(self):
609 609 '''verify the integrity of the repository. Return 0 on success or
610 610 warning, 1 on any error.
611 611 '''
612 612 return 0
613 613
614 614 @propertycache
615 615 def wvfs(self):
616 616 """return vfs to access the working directory of this subrepository
617 617 """
618 618 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
619 619
620 620 @propertycache
621 621 def _relpath(self):
622 622 """return path to this subrepository as seen from outermost repository
623 623 """
624 624 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
625 625
626 626 class hgsubrepo(abstractsubrepo):
627 627 def __init__(self, ctx, path, state, allowcreate):
628 628 super(hgsubrepo, self).__init__(ctx, path)
629 629 self._state = state
630 630 r = ctx.repo()
631 631 root = r.wjoin(path)
632 632 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
633 633 self._repo = hg.repository(r.baseui, root, create=create)
634 634
635 635 # Propagate the parent's --hidden option
636 636 if r is r.unfiltered():
637 637 self._repo = self._repo.unfiltered()
638 638
639 639 self.ui = self._repo.ui
640 640 for s, k in [('ui', 'commitsubrepos')]:
641 641 v = r.ui.config(s, k)
642 642 if v:
643 643 self.ui.setconfig(s, k, v, 'subrepo')
644 644 # internal config: ui._usedassubrepo
645 645 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
646 646 self._initrepo(r, state[0], create)
647 647
648 648 def storeclean(self, path):
649 649 with self._repo.lock():
650 650 return self._storeclean(path)
651 651
652 652 def _storeclean(self, path):
653 653 clean = True
654 654 itercache = self._calcstorehash(path)
655 655 for filehash in self._readstorehashcache(path):
656 656 if filehash != next(itercache, None):
657 657 clean = False
658 658 break
659 659 if clean:
660 660 # if not empty:
661 661 # the cached and current pull states have a different size
662 662 clean = next(itercache, None) is None
663 663 return clean
664 664
665 665 def _calcstorehash(self, remotepath):
666 666 '''calculate a unique "store hash"
667 667
668 668 This method is used to detect when there are changes that may
669 669 require a push to a given remote path.'''
670 670 # sort the files that will be hashed in (likely) increasing file size
671 671 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
672 672 yield '# %s\n' % _expandedabspath(remotepath)
673 673 vfs = self._repo.vfs
674 674 for relname in filelist:
675 675 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
676 676 yield '%s = %s\n' % (relname, filehash)
677 677
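The resulting cache file is a short text document, one sha1 per tracked store file (digests below are placeholders):

    # <expanded remote path>
    bookmarks = <sha1 of .hg/bookmarks>
    store/phaseroots = <sha1 of .hg/store/phaseroots>
    store/00changelog.i = <sha1 of .hg/store/00changelog.i>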
678 678 @propertycache
679 679 def _cachestorehashvfs(self):
680 680 return scmutil.vfs(self._repo.join('cache/storehash'))
681 681
682 682 def _readstorehashcache(self, remotepath):
683 683 '''read the store hash cache for a given remote repository'''
684 684 cachefile = _getstorehashcachename(remotepath)
685 685 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
686 686
687 687 def _cachestorehash(self, remotepath):
688 688 '''cache the current store hash
689 689
690 690 Each remote repo requires its own store hash cache, because a subrepo
691 691 store may be "clean" versus a given remote repo, but not versus another
692 692 '''
693 693 cachefile = _getstorehashcachename(remotepath)
694 694 with self._repo.lock():
695 695 storehash = list(self._calcstorehash(remotepath))
696 696 vfs = self._cachestorehashvfs
697 697 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
698 698
699 699 def _getctx(self):
700 700 '''fetch the context for this subrepo revision, possibly a workingctx
701 701 '''
702 702 if self._ctx.rev() is None:
703 703 return self._repo[None] # workingctx if parent is workingctx
704 704 else:
705 705 rev = self._state[1]
706 706 return self._repo[rev]
707 707
708 708 @annotatesubrepoerror
709 709 def _initrepo(self, parentrepo, source, create):
710 710 self._repo._subparent = parentrepo
711 711 self._repo._subsource = source
712 712
713 713 if create:
714 714 lines = ['[paths]\n']
715 715
716 716 def addpathconfig(key, value):
717 717 if value:
718 718 lines.append('%s = %s\n' % (key, value))
719 719 self.ui.setconfig('paths', key, value, 'subrepo')
720 720
721 721 defpath = _abssource(self._repo, abort=False)
722 722 defpushpath = _abssource(self._repo, True, abort=False)
723 723 addpathconfig('default', defpath)
724 724 if defpath != defpushpath:
725 725 addpathconfig('default-push', defpushpath)
726 726
727 727 fp = self._repo.vfs("hgrc", "w", text=True)
728 728 try:
729 729 fp.write(''.join(lines))
730 730 finally:
731 731 fp.close()
732 732
733 733 @annotatesubrepoerror
734 734 def add(self, ui, match, prefix, explicitonly, **opts):
735 735 return cmdutil.add(ui, self._repo, match,
736 736 self.wvfs.reljoin(prefix, self._path),
737 737 explicitonly, **opts)
738 738
739 739 @annotatesubrepoerror
740 740 def addremove(self, m, prefix, opts, dry_run, similarity):
741 741 # In the same way as sub directories are processed, once in a subrepo,
742 742 # always enter any of its subrepos. Don't corrupt the options that will
743 743 # be used to process sibling subrepos, however.
744 744 opts = copy.copy(opts)
745 745 opts['subrepos'] = True
746 746 return scmutil.addremove(self._repo, m,
747 747 self.wvfs.reljoin(prefix, self._path), opts,
748 748 dry_run, similarity)
749 749
750 750 @annotatesubrepoerror
751 751 def cat(self, match, prefix, **opts):
752 752 rev = self._state[1]
753 753 ctx = self._repo[rev]
754 754 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
755 755
756 756 @annotatesubrepoerror
757 757 def status(self, rev2, **opts):
758 758 try:
759 759 rev1 = self._state[1]
760 760 ctx1 = self._repo[rev1]
761 761 ctx2 = self._repo[rev2]
762 762 return self._repo.status(ctx1, ctx2, **opts)
763 763 except error.RepoLookupError as inst:
764 764 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
765 765 % (inst, subrelpath(self)))
766 766 return scmutil.status([], [], [], [], [], [], [])
767 767
768 768 @annotatesubrepoerror
769 769 def diff(self, ui, diffopts, node2, match, prefix, **opts):
770 770 try:
771 771 node1 = node.bin(self._state[1])
772 772 # We currently expect node2 to come from substate and be
773 773 # in hex format
774 774 if node2 is not None:
775 775 node2 = node.bin(node2)
776 776 cmdutil.diffordiffstat(ui, self._repo, diffopts,
777 777 node1, node2, match,
778 778 prefix=posixpath.join(prefix, self._path),
779 779 listsubrepos=True, **opts)
780 780 except error.RepoLookupError as inst:
781 781 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
782 782 % (inst, subrelpath(self)))
783 783
784 784 @annotatesubrepoerror
785 def archive(self, archiver, prefix, match=None):
785 def archive(self, archiver, prefix, match=None, decode=True):
786 786 self._get(self._state + ('hg',))
787 787 total = abstractsubrepo.archive(self, archiver, prefix, match)
788 788 rev = self._state[1]
789 789 ctx = self._repo[rev]
790 790 for subpath in ctx.substate:
791 791 s = subrepo(ctx, subpath, True)
792 792 submatch = matchmod.subdirmatcher(subpath, match)
793 total += s.archive(archiver, prefix + self._path + '/', submatch)
793 total += s.archive(archiver, prefix + self._path + '/', submatch,
794 decode)
794 795 return total
795 796
796 797 @annotatesubrepoerror
797 798 def dirty(self, ignoreupdate=False):
798 799 r = self._state[1]
799 800 if r == '' and not ignoreupdate: # no state recorded
800 801 return True
801 802 w = self._repo[None]
802 803 if r != w.p1().hex() and not ignoreupdate:
803 804 # different version checked out
804 805 return True
805 806 return w.dirty() # working directory changed
806 807
807 808 def basestate(self):
808 809 return self._repo['.'].hex()
809 810
810 811 def checknested(self, path):
811 812 return self._repo._checknested(self._repo.wjoin(path))
812 813
813 814 @annotatesubrepoerror
814 815 def commit(self, text, user, date):
815 816 # don't bother committing in the subrepo if it's only been
816 817 # updated
817 818 if not self.dirty(True):
818 819 return self._repo['.'].hex()
819 820 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
820 821 n = self._repo.commit(text, user, date)
821 822 if not n:
822 823 return self._repo['.'].hex() # different version checked out
823 824 return node.hex(n)
824 825
825 826 @annotatesubrepoerror
826 827 def phase(self, state):
827 828 return self._repo[state].phase()
828 829
829 830 @annotatesubrepoerror
830 831 def remove(self):
831 832 # we can't fully delete the repository as it may contain
832 833 # local-only history
833 834 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
834 835 hg.clean(self._repo, node.nullid, False)
835 836
836 837 def _get(self, state):
837 838 source, revision, kind = state
838 839 if revision in self._repo.unfiltered():
839 840 return True
840 841 self._repo._subsource = source
841 842 srcurl = _abssource(self._repo)
842 843 other = hg.peer(self._repo, {}, srcurl)
843 844 if len(self._repo) == 0:
844 845 self.ui.status(_('cloning subrepo %s from %s\n')
845 846 % (subrelpath(self), srcurl))
846 847 parentrepo = self._repo._subparent
847 848 # use self._repo.vfs instead of self.wvfs to remove .hg only
848 849 self._repo.vfs.rmtree()
849 850 other, cloned = hg.clone(self._repo._subparent.baseui, {},
850 851 other, self._repo.root,
851 852 update=False)
852 853 self._repo = cloned.local()
853 854 self._initrepo(parentrepo, source, create=True)
854 855 self._cachestorehash(srcurl)
855 856 else:
856 857 self.ui.status(_('pulling subrepo %s from %s\n')
857 858 % (subrelpath(self), srcurl))
858 859 cleansub = self.storeclean(srcurl)
859 860 exchange.pull(self._repo, other)
860 861 if cleansub:
861 862 # keep the repo clean after pull
862 863 self._cachestorehash(srcurl)
863 864 return False
864 865
865 866 @annotatesubrepoerror
866 867 def get(self, state, overwrite=False):
867 868 inrepo = self._get(state)
868 869 source, revision, kind = state
869 870 repo = self._repo
870 871 repo.ui.debug("getting subrepo %s\n" % self._path)
871 872 if inrepo:
872 873 urepo = repo.unfiltered()
873 874 ctx = urepo[revision]
874 875 if ctx.hidden():
875 876 urepo.ui.warn(
876 877 _('revision %s in subrepo %s is hidden\n') \
877 878 % (revision[0:12], self._path))
878 879 repo = urepo
879 880 hg.updaterepo(repo, revision, overwrite)
880 881
881 882 @annotatesubrepoerror
882 883 def merge(self, state):
883 884 self._get(state)
884 885 cur = self._repo['.']
885 886 dst = self._repo[state[1]]
886 887 anc = dst.ancestor(cur)
887 888
888 889 def mergefunc():
889 890 if anc == cur and dst.branch() == cur.branch():
890 891 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
891 892 hg.update(self._repo, state[1])
892 893 elif anc == dst:
893 894 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
894 895 else:
895 896 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
896 897 hg.merge(self._repo, state[1], remind=False)
897 898
898 899 wctx = self._repo[None]
899 900 if self.dirty():
900 901 if anc != dst:
901 902 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
902 903 mergefunc()
903 904 else:
904 905 mergefunc()
905 906 else:
906 907 mergefunc()
907 908
908 909 @annotatesubrepoerror
909 910 def push(self, opts):
910 911 force = opts.get('force')
911 912 newbranch = opts.get('new_branch')
912 913 ssh = opts.get('ssh')
913 914
914 915 # push subrepos depth-first for coherent ordering
915 916 c = self._repo['']
916 917 subs = c.substate # only repos that are committed
917 918 for s in sorted(subs):
918 919 if c.sub(s).push(opts) == 0:
919 920 return False
920 921
921 922 dsturl = _abssource(self._repo, True)
922 923 if not force:
923 924 if self.storeclean(dsturl):
924 925 self.ui.status(
925 926 _('no changes made to subrepo %s since last push to %s\n')
926 927 % (subrelpath(self), dsturl))
927 928 return None
928 929 self.ui.status(_('pushing subrepo %s to %s\n') %
929 930 (subrelpath(self), dsturl))
930 931 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
931 932 res = exchange.push(self._repo, other, force, newbranch=newbranch)
932 933
933 934 # the repo is now clean
934 935 self._cachestorehash(dsturl)
935 936 return res.cgresult
936 937
937 938 @annotatesubrepoerror
938 939 def outgoing(self, ui, dest, opts):
939 940 if 'rev' in opts or 'branch' in opts:
940 941 opts = copy.copy(opts)
941 942 opts.pop('rev', None)
942 943 opts.pop('branch', None)
943 944 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
944 945
945 946 @annotatesubrepoerror
946 947 def incoming(self, ui, source, opts):
947 948 if 'rev' in opts or 'branch' in opts:
948 949 opts = copy.copy(opts)
949 950 opts.pop('rev', None)
950 951 opts.pop('branch', None)
951 952 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
952 953
953 954 @annotatesubrepoerror
954 955 def files(self):
955 956 rev = self._state[1]
956 957 ctx = self._repo[rev]
957 958 return ctx.manifest().keys()
958 959
959 def filedata(self, name):
960 def filedata(self, name, decode):
960 961 rev = self._state[1]
961 return self._repo[rev][name].data()
962 data = self._repo[rev][name].data()
963 if decode:
964 data = self._repo.wwritedata(name, data)
965 return data
962 966
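The wwritedata() call above is what applies the [decode] filters configured in hgrc: it returns the working-directory form of the content without writing anything to disk. A small sketch, with repo, rev and the file name standing in for real values:

    ctx = repo[rev]
    raw = ctx['data.txt'].data()               # content as stored in history
    cooked = repo.wwritedata('data.txt', raw)  # content after [decode] filters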
963 967 def fileflags(self, name):
964 968 rev = self._state[1]
965 969 ctx = self._repo[rev]
966 970 return ctx.flags(name)
967 971
968 972 @annotatesubrepoerror
969 973 def printfiles(self, ui, m, fm, fmt, subrepos):
970 974 # If the parent context is a workingctx, use the workingctx here for
971 975 # consistency.
972 976 if self._ctx.rev() is None:
973 977 ctx = self._repo[None]
974 978 else:
975 979 rev = self._state[1]
976 980 ctx = self._repo[rev]
977 981 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
978 982
979 983 @annotatesubrepoerror
980 984 def getfileset(self, expr):
981 985 if self._ctx.rev() is None:
982 986 ctx = self._repo[None]
983 987 else:
984 988 rev = self._state[1]
985 989 ctx = self._repo[rev]
986 990
987 991 files = ctx.getfileset(expr)
988 992
989 993 for subpath in ctx.substate:
990 994 sub = ctx.sub(subpath)
991 995
992 996 try:
993 997 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
994 998 except error.LookupError:
995 999 self.ui.status(_("skipping missing subrepository: %s\n")
996 1000 % self.wvfs.reljoin(reporelpath(self), subpath))
997 1001 return files
998 1002
999 1003 def walk(self, match):
1000 1004 ctx = self._repo[None]
1001 1005 return ctx.walk(match)
1002 1006
1003 1007 @annotatesubrepoerror
1004 1008 def forget(self, match, prefix):
1005 1009 return cmdutil.forget(self.ui, self._repo, match,
1006 1010 self.wvfs.reljoin(prefix, self._path), True)
1007 1011
1008 1012 @annotatesubrepoerror
1009 1013 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1010 1014 return cmdutil.remove(self.ui, self._repo, matcher,
1011 1015 self.wvfs.reljoin(prefix, self._path),
1012 1016 after, force, subrepos)
1013 1017
1014 1018 @annotatesubrepoerror
1015 1019 def revert(self, substate, *pats, **opts):
1016 1020 # reverting a subrepo is a 2 step process:
1017 1021 # 1. if the no_backup is not set, revert all modified
1018 1022 # files inside the subrepo
1019 1023 # 2. update the subrepo to the revision specified in
1020 1024 # the corresponding substate dictionary
1021 1025 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1022 1026 if not opts.get('no_backup'):
1023 1027 # Revert all files on the subrepo, creating backups
1024 1028 # Note that this will not recursively revert subrepos
1025 1029 # We could do it if there was a set:subrepos() predicate
1026 1030 opts = opts.copy()
1027 1031 opts['date'] = None
1028 1032 opts['rev'] = substate[1]
1029 1033
1030 1034 self.filerevert(*pats, **opts)
1031 1035
1032 1036 # Update the repo to the revision specified in the given substate
1033 1037 if not opts.get('dry_run'):
1034 1038 self.get(substate, overwrite=True)
1035 1039
1036 1040 def filerevert(self, *pats, **opts):
1037 1041 ctx = self._repo[opts['rev']]
1038 1042 parents = self._repo.dirstate.parents()
1039 1043 if opts.get('all'):
1040 1044 pats = ['set:modified()']
1041 1045 else:
1042 1046 pats = []
1043 1047 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1044 1048
1045 1049 def shortid(self, revid):
1046 1050 return revid[:12]
1047 1051
1048 1052 def verify(self):
1049 1053 try:
1050 1054 rev = self._state[1]
1051 1055 ctx = self._repo.unfiltered()[rev]
1052 1056 if ctx.hidden():
1053 1057 # Since hidden revisions aren't pushed/pulled, it seems worth an
1054 1058 # explicit warning.
1055 1059 ui = self._repo.ui
1056 1060 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1057 1061 (self._relpath, node.short(self._ctx.node())))
1058 1062 return 0
1059 1063 except error.RepoLookupError:
1060 1064 # A missing subrepo revision may be a case of needing to pull it, so
1061 1065 # don't treat this as an error.
1062 1066 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1063 1067 (self._relpath, node.short(self._ctx.node())))
1064 1068 return 0
1065 1069
1066 1070 @propertycache
1067 1071 def wvfs(self):
1068 1072 """return own wvfs for efficiency and consistency
1069 1073 """
1070 1074 return self._repo.wvfs
1071 1075
1072 1076 @propertycache
1073 1077 def _relpath(self):
1074 1078 """return path to this subrepository as seen from outermost repository
1075 1079 """
1076 1080 # Keep consistent dir separators by avoiding vfs.join(self._path)
1077 1081 return reporelpath(self._repo)
1078 1082
1079 1083 class svnsubrepo(abstractsubrepo):
1080 1084 def __init__(self, ctx, path, state, allowcreate):
1081 1085 super(svnsubrepo, self).__init__(ctx, path)
1082 1086 self._state = state
1083 1087 self._exe = util.findexe('svn')
1084 1088 if not self._exe:
1085 1089 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1086 1090 % self._path)
1087 1091
1088 1092 def _svncommand(self, commands, filename='', failok=False):
1089 1093 cmd = [self._exe]
1090 1094 extrakw = {}
1091 1095 if not self.ui.interactive():
1092 1096 # Making stdin be a pipe should prevent svn from behaving
1093 1097 # interactively even if we can't pass --non-interactive.
1094 1098 extrakw['stdin'] = subprocess.PIPE
1095 1099 # Starting in svn 1.5 --non-interactive is a global flag
1096 1100 # instead of being per-command, but we need to support 1.4 so
1097 1101 # we have to be intelligent about what commands take
1098 1102 # --non-interactive.
1099 1103 if commands[0] in ('update', 'checkout', 'commit'):
1100 1104 cmd.append('--non-interactive')
1101 1105 cmd.extend(commands)
1102 1106 if filename is not None:
1103 1107 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1104 1108 self._path, filename)
1105 1109 cmd.append(path)
1106 1110 env = dict(encoding.environ)
1107 1111 # Avoid localized output, preserve current locale for everything else.
1108 1112 lc_all = env.get('LC_ALL')
1109 1113 if lc_all:
1110 1114 env['LANG'] = lc_all
1111 1115 del env['LC_ALL']
1112 1116 env['LC_MESSAGES'] = 'C'
1113 1117 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1114 1118 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1115 1119 universal_newlines=True, env=env, **extrakw)
1116 1120 stdout, stderr = p.communicate()
1117 1121 stderr = stderr.strip()
1118 1122 if not failok:
1119 1123 if p.returncode:
1120 1124 raise error.Abort(stderr or 'exited with code %d'
1121 1125 % p.returncode)
1122 1126 if stderr:
1123 1127 self.ui.warn(stderr + '\n')
1124 1128 return stdout, stderr
1125 1129
1126 1130 @propertycache
1127 1131 def _svnversion(self):
1128 1132 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1129 1133 m = re.search(r'^(\d+)\.(\d+)', output)
1130 1134 if not m:
1131 1135 raise error.Abort(_('cannot retrieve svn tool version'))
1132 1136 return (int(m.group(1)), int(m.group(2)))
1133 1137
1134 1138 def _wcrevs(self):
1135 1139 # Get the working directory revision as well as the last
1136 1140 # commit revision so we can compare the subrepo state with
1137 1141 # both. We used to store the working directory one.
1138 1142 output, err = self._svncommand(['info', '--xml'])
1139 1143 doc = xml.dom.minidom.parseString(output)
1140 1144 entries = doc.getElementsByTagName('entry')
1141 1145 lastrev, rev = '0', '0'
1142 1146 if entries:
1143 1147 rev = str(entries[0].getAttribute('revision')) or '0'
1144 1148 commits = entries[0].getElementsByTagName('commit')
1145 1149 if commits:
1146 1150 lastrev = str(commits[0].getAttribute('revision')) or '0'
1147 1151 return (lastrev, rev)
1148 1152
1149 1153 def _wcrev(self):
1150 1154 return self._wcrevs()[0]
1151 1155
1152 1156 def _wcchanged(self):
1153 1157 """Return (changes, extchanges, missing) where changes is True
1154 1158 if the working directory was changed, extchanges is
1155 1159 True if any of these changes concern an external entry and missing
1156 1160 is True if any change is a missing entry.
1157 1161 """
1158 1162 output, err = self._svncommand(['status', '--xml'])
1159 1163 externals, changes, missing = [], [], []
1160 1164 doc = xml.dom.minidom.parseString(output)
1161 1165 for e in doc.getElementsByTagName('entry'):
1162 1166 s = e.getElementsByTagName('wc-status')
1163 1167 if not s:
1164 1168 continue
1165 1169 item = s[0].getAttribute('item')
1166 1170 props = s[0].getAttribute('props')
1167 1171 path = e.getAttribute('path')
1168 1172 if item == 'external':
1169 1173 externals.append(path)
1170 1174 elif item == 'missing':
1171 1175 missing.append(path)
1172 1176 if (item not in ('', 'normal', 'unversioned', 'external')
1173 1177 or props not in ('', 'none', 'normal')):
1174 1178 changes.append(path)
1175 1179 for path in changes:
1176 1180 for ext in externals:
1177 1181 if path == ext or path.startswith(ext + pycompat.ossep):
1178 1182 return True, True, bool(missing)
1179 1183 return bool(changes), False, bool(missing)
1180 1184
1181 1185 def dirty(self, ignoreupdate=False):
1182 1186 if not self._wcchanged()[0]:
1183 1187 if self._state[1] in self._wcrevs() or ignoreupdate:
1184 1188 return False
1185 1189 return True
1186 1190
1187 1191 def basestate(self):
1188 1192 lastrev, rev = self._wcrevs()
1189 1193 if lastrev != rev:
1190 1194 # Last committed rev is not the same as rev. We would
1191 1195 # like to take lastrev but we do not know if the subrepo
1192 1196 # URL exists at lastrev. Test it and fall back to rev if it
1193 1197 # is not there.
1194 1198 try:
1195 1199 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1196 1200 return lastrev
1197 1201 except error.Abort:
1198 1202 pass
1199 1203 return rev
1200 1204
1201 1205 @annotatesubrepoerror
1202 1206 def commit(self, text, user, date):
1203 1207 # user and date are out of our hands since svn is centralized
1204 1208 changed, extchanged, missing = self._wcchanged()
1205 1209 if not changed:
1206 1210 return self.basestate()
1207 1211 if extchanged:
1208 1212 # Do not try to commit externals
1209 1213 raise error.Abort(_('cannot commit svn externals'))
1210 1214 if missing:
1211 1215 # svn can commit with missing entries but aborting like hg
1212 1216 # seems a better approach.
1213 1217 raise error.Abort(_('cannot commit missing svn entries'))
1214 1218 commitinfo, err = self._svncommand(['commit', '-m', text])
1215 1219 self.ui.status(commitinfo)
1216 1220 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1217 1221 if not newrev:
1218 1222 if not commitinfo.strip():
1219 1223 # Sometimes, our definition of "changed" differs from
1220 1224 # svn's. For instance, svn ignores missing files
1221 1225 # when committing. If there are only missing files, no
1222 1226 # commit is made, no output and no error code.
1223 1227 raise error.Abort(_('failed to commit svn changes'))
1224 1228 raise error.Abort(commitinfo.splitlines()[-1])
1225 1229 newrev = newrev.groups()[0]
1226 1230 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1227 1231 return newrev
1228 1232
1229 1233 @annotatesubrepoerror
1230 1234 def remove(self):
1231 1235 if self.dirty():
1232 1236 self.ui.warn(_('not removing repo %s because '
1233 1237 'it has changes.\n') % self._path)
1234 1238 return
1235 1239 self.ui.note(_('removing subrepo %s\n') % self._path)
1236 1240
1237 1241 self.wvfs.rmtree(forcibly=True)
1238 1242 try:
1239 1243 pwvfs = self._ctx.repo().wvfs
1240 1244 pwvfs.removedirs(pwvfs.dirname(self._path))
1241 1245 except OSError:
1242 1246 pass
1243 1247
1244 1248 @annotatesubrepoerror
1245 1249 def get(self, state, overwrite=False):
1246 1250 if overwrite:
1247 1251 self._svncommand(['revert', '--recursive'])
1248 1252 args = ['checkout']
1249 1253 if self._svnversion >= (1, 5):
1250 1254 args.append('--force')
1251 1255 # The revision must be specified at the end of the URL to properly
1252 1256 # update to a directory which has since been deleted and recreated.
1253 1257 args.append('%s@%s' % (state[0], state[1]))
1254 1258 status, err = self._svncommand(args, failok=True)
1255 1259 _sanitize(self.ui, self.wvfs, '.svn')
1256 1260 if not re.search('Checked out revision [0-9]+.', status):
1257 1261 if ('is already a working copy for a different URL' in err
1258 1262 and (self._wcchanged()[:2] == (False, False))):
1259 1263 # obstructed but clean working copy, so just blow it away.
1260 1264 self.remove()
1261 1265 self.get(state, overwrite=False)
1262 1266 return
1263 1267 raise error.Abort((status or err).splitlines()[-1])
1264 1268 self.ui.status(status)
1265 1269
1266 1270 @annotatesubrepoerror
1267 1271 def merge(self, state):
1268 1272 old = self._state[1]
1269 1273 new = state[1]
1270 1274 wcrev = self._wcrev()
1271 1275 if new != wcrev:
1272 1276 dirty = old == wcrev or self._wcchanged()[0]
1273 1277 if _updateprompt(self.ui, self, dirty, wcrev, new):
1274 1278 self.get(state, False)
1275 1279
1276 1280 def push(self, opts):
1277 1281 # push is a no-op for SVN
1278 1282 return True
1279 1283
1280 1284 @annotatesubrepoerror
1281 1285 def files(self):
1282 1286 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1283 1287 doc = xml.dom.minidom.parseString(output)
1284 1288 paths = []
1285 1289 for e in doc.getElementsByTagName('entry'):
1286 1290 kind = str(e.getAttribute('kind'))
1287 1291 if kind != 'file':
1288 1292 continue
1289 1293 name = ''.join(c.data for c
1290 1294 in e.getElementsByTagName('name')[0].childNodes
1291 1295 if c.nodeType == c.TEXT_NODE)
1292 1296 paths.append(name.encode('utf-8'))
1293 1297 return paths
1294 1298
1295 def filedata(self, name):
1299 def filedata(self, name, decode):
1296 1300 return self._svncommand(['cat'], name)[0]
1297 1301
1298 1302
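The filedata() signature change above is the hook for this commit's purpose: archiving should run the repo's [decode] filters. The svn backend simply ignores the new flag, but a Mercurial-backed subrepo could honor it roughly as sketched below (illustrative only; it assumes the parent repo object exposes wwritedata() to apply the decode filters):

    def filedata(self, name, decode):
        # read the file as stored at the pinned subrepo revision
        rev = self._state[1]
        data = self._repo[rev][name].data()
        if decode:
            # assumption for illustration: wwritedata() applies the repo's
            # [decode] filters to the raw data before it reaches the archive
            data = self._repo.wwritedata(name, data)
        return data
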
1299 1303 class gitsubrepo(abstractsubrepo):
1300 1304 def __init__(self, ctx, path, state, allowcreate):
1301 1305 super(gitsubrepo, self).__init__(ctx, path)
1302 1306 self._state = state
1303 1307 self._abspath = ctx.repo().wjoin(path)
1304 1308 self._subparent = ctx.repo()
1305 1309 self._ensuregit()
1306 1310
1307 1311 def _ensuregit(self):
1308 1312 try:
1309 1313 self._gitexecutable = 'git'
1310 1314 out, err = self._gitnodir(['--version'])
1311 1315 except OSError as e:
1312 1316 genericerror = _("error executing git for subrepo '%s': %s")
1313 1317 notfoundhint = _("check git is installed and in your PATH")
1314 1318 if e.errno != errno.ENOENT:
1315 1319 raise error.Abort(genericerror % (self._path, e.strerror))
1316 1320 elif pycompat.osname == 'nt':
1317 1321 try:
1318 1322 self._gitexecutable = 'git.cmd'
1319 1323 out, err = self._gitnodir(['--version'])
1320 1324 except OSError as e2:
1321 1325 if e2.errno == errno.ENOENT:
1322 1326 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1323 1327 " for subrepo '%s'") % self._path,
1324 1328 hint=notfoundhint)
1325 1329 else:
1326 1330 raise error.Abort(genericerror % (self._path,
1327 1331 e2.strerror))
1328 1332 else:
1329 1333 raise error.Abort(_("couldn't find git for subrepo '%s'")
1330 1334 % self._path, hint=notfoundhint)
1331 1335 versionstatus = self._checkversion(out)
1332 1336 if versionstatus == 'unknown':
1333 1337 self.ui.warn(_('cannot retrieve git version\n'))
1334 1338 elif versionstatus == 'abort':
1335 1339 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1336 1340 elif versionstatus == 'warning':
1337 1341 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1338 1342
1339 1343 @staticmethod
1340 1344 def _gitversion(out):
1341 1345 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1342 1346 if m:
1343 1347 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1344 1348
1345 1349 m = re.search(r'^git version (\d+)\.(\d+)', out)
1346 1350 if m:
1347 1351 return (int(m.group(1)), int(m.group(2)), 0)
1348 1352
1349 1353 return -1
1350 1354
1351 1355 @staticmethod
1352 1356 def _checkversion(out):
1353 1357 '''ensure git version is new enough
1354 1358
1355 1359 >>> _checkversion = gitsubrepo._checkversion
1356 1360 >>> _checkversion('git version 1.6.0')
1357 1361 'ok'
1358 1362 >>> _checkversion('git version 1.8.5')
1359 1363 'ok'
1360 1364 >>> _checkversion('git version 1.4.0')
1361 1365 'abort'
1362 1366 >>> _checkversion('git version 1.5.0')
1363 1367 'warning'
1364 1368 >>> _checkversion('git version 1.9-rc0')
1365 1369 'ok'
1366 1370 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1367 1371 'ok'
1368 1372 >>> _checkversion('git version 1.9.0.GIT')
1369 1373 'ok'
1370 1374 >>> _checkversion('git version 12345')
1371 1375 'unknown'
1372 1376 >>> _checkversion('no')
1373 1377 'unknown'
1374 1378 '''
1375 1379 version = gitsubrepo._gitversion(out)
1376 1380 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1377 1381 # despite the docstring comment. For now, error on 1.4.0, warn on
1378 1382 # 1.5.0 but attempt to continue.
1379 1383 if version == -1:
1380 1384 return 'unknown'
1381 1385 if version < (1, 5, 0):
1382 1386 return 'abort'
1383 1387 elif version < (1, 6, 0):
1384 1388 return 'warning'
1385 1389 return 'ok'
1386 1390
1387 1391 def _gitcommand(self, commands, env=None, stream=False):
1388 1392 return self._gitdir(commands, env=env, stream=stream)[0]
1389 1393
1390 1394 def _gitdir(self, commands, env=None, stream=False):
1391 1395 return self._gitnodir(commands, env=env, stream=stream,
1392 1396 cwd=self._abspath)
1393 1397
1394 1398 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1395 1399 """Calls the git command
1396 1400
1397 1401 The method tries to call the git command. Versions prior to 1.6.0
1398 1402 are not supported and will very probably fail.
1399 1403 """
1400 1404 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1401 1405 if env is None:
1402 1406 env = encoding.environ.copy()
1403 1407 # disable localization for Git output (issue5176)
1404 1408 env['LC_ALL'] = 'C'
1405 1409 # fix for Git CVE-2015-7545
1406 1410 if 'GIT_ALLOW_PROTOCOL' not in env:
1407 1411 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1408 1412 # unless ui.quiet is set, print git's stderr,
1409 1413 # which is mostly progress and useful info
1410 1414 errpipe = None
1411 1415 if self.ui.quiet:
1412 1416 errpipe = open(os.devnull, 'w')
1413 1417 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1414 1418 cwd=cwd, env=env, close_fds=util.closefds,
1415 1419 stdout=subprocess.PIPE, stderr=errpipe)
1416 1420 if stream:
1417 1421 return p.stdout, None
1418 1422
1419 1423 retdata = p.stdout.read().strip()
1420 1424 # wait for the child to exit to avoid a race condition.
1421 1425 p.wait()
1422 1426
1423 1427 if p.returncode != 0 and p.returncode != 1:
1424 1428 # there are certain error codes that are ok
1425 1429 command = commands[0]
1426 1430 if command in ('cat-file', 'symbolic-ref'):
1427 1431 return retdata, p.returncode
1428 1432 # for all others, abort
1429 1433 raise error.Abort(_('git %s error %d in %s') %
1430 1434 (command, p.returncode, self._relpath))
1431 1435
1432 1436 return retdata, p.returncode
1433 1437
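For reference, the environment handling in _gitnodir() above reduces to a small, reusable pattern; the following is a self-contained sketch (the rungit helper name is made up for illustration):

    import os
    import subprocess

    def rungit(args, cwd=None):
        # force the C locale so git's output stays machine-parseable, and
        # restrict transport protocols unless the caller already configured
        # them (the CVE-2015-7545 precaution taken above)
        env = os.environ.copy()
        env['LC_ALL'] = 'C'
        env.setdefault('GIT_ALLOW_PROTOCOL', 'file:git:http:https:ssh')
        p = subprocess.Popen(['git'] + args, cwd=cwd, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        return out, p.returncode
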
1434 1438 def _gitmissing(self):
1435 1439 return not self.wvfs.exists('.git')
1436 1440
1437 1441 def _gitstate(self):
1438 1442 return self._gitcommand(['rev-parse', 'HEAD'])
1439 1443
1440 1444 def _gitcurrentbranch(self):
1441 1445 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1442 1446 if err:
1443 1447 current = None
1444 1448 return current
1445 1449
1446 1450 def _gitremote(self, remote):
1447 1451 out = self._gitcommand(['remote', 'show', '-n', remote])
1448 1452 line = out.split('\n')[1]
1449 1453 i = line.index('URL: ') + len('URL: ')
1450 1454 return line[i:]
1451 1455
1452 1456 def _githavelocally(self, revision):
1453 1457 out, code = self._gitdir(['cat-file', '-e', revision])
1454 1458 return code == 0
1455 1459
1456 1460 def _gitisancestor(self, r1, r2):
1457 1461 base = self._gitcommand(['merge-base', r1, r2])
1458 1462 return base == r1
1459 1463
1460 1464 def _gitisbare(self):
1461 1465 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1462 1466
1463 1467 def _gitupdatestat(self):
1464 1468 """This must be run before git diff-index.
1465 1469 diff-index only looks at changes to file stat;
1466 1470 this command looks at file contents and updates the stat."""
1467 1471 self._gitcommand(['update-index', '-q', '--refresh'])
1468 1472
1469 1473 def _gitbranchmap(self):
1470 1474 '''returns 2 things:
1471 1475 a map from git branch to revision
1472 1476 a map from revision to branches'''
1473 1477 branch2rev = {}
1474 1478 rev2branch = {}
1475 1479
1476 1480 out = self._gitcommand(['for-each-ref', '--format',
1477 1481 '%(objectname) %(refname)'])
1478 1482 for line in out.split('\n'):
1479 1483 revision, ref = line.split(' ')
1480 1484 if (not ref.startswith('refs/heads/') and
1481 1485 not ref.startswith('refs/remotes/')):
1482 1486 continue
1483 1487 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1484 1488 continue # ignore remote/HEAD redirects
1485 1489 branch2rev[ref] = revision
1486 1490 rev2branch.setdefault(revision, []).append(ref)
1487 1491 return branch2rev, rev2branch
1488 1492
1489 1493 def _gittracking(self, branches):
1490 1494 'return map of remote branch to local tracking branch'
1491 1495 # assumes no more than one local tracking branch for each remote
1492 1496 tracking = {}
1493 1497 for b in branches:
1494 1498 if b.startswith('refs/remotes/'):
1495 1499 continue
1496 1500 bname = b.split('/', 2)[2]
1497 1501 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1498 1502 if remote:
1499 1503 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1500 1504 tracking['refs/remotes/%s/%s' %
1501 1505 (remote, ref.split('/', 2)[2])] = b
1502 1506 return tracking
1503 1507
1504 1508 def _abssource(self, source):
1505 1509 if '://' not in source:
1506 1510 # recognize the scp syntax as an absolute source
1507 1511 colon = source.find(':')
1508 1512 if colon != -1 and '/' not in source[:colon]:
1509 1513 return source
1510 1514 self._subsource = source
1511 1515 return _abssource(self)
1512 1516
1513 1517 def _fetch(self, source, revision):
1514 1518 if self._gitmissing():
1515 1519 source = self._abssource(source)
1516 1520 self.ui.status(_('cloning subrepo %s from %s\n') %
1517 1521 (self._relpath, source))
1518 1522 self._gitnodir(['clone', source, self._abspath])
1519 1523 if self._githavelocally(revision):
1520 1524 return
1521 1525 self.ui.status(_('pulling subrepo %s from %s\n') %
1522 1526 (self._relpath, self._gitremote('origin')))
1523 1527 # try only origin: the originally cloned repo
1524 1528 self._gitcommand(['fetch'])
1525 1529 if not self._githavelocally(revision):
1526 1530 raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
1527 1531 (revision, self._relpath))
1528 1532
1529 1533 @annotatesubrepoerror
1530 1534 def dirty(self, ignoreupdate=False):
1531 1535 if self._gitmissing():
1532 1536 return self._state[1] != ''
1533 1537 if self._gitisbare():
1534 1538 return True
1535 1539 if not ignoreupdate and self._state[1] != self._gitstate():
1536 1540 # different version checked out
1537 1541 return True
1538 1542 # check for staged changes or modified files; ignore untracked files
1539 1543 self._gitupdatestat()
1540 1544 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1541 1545 return code == 1
1542 1546
1543 1547 def basestate(self):
1544 1548 return self._gitstate()
1545 1549
1546 1550 @annotatesubrepoerror
1547 1551 def get(self, state, overwrite=False):
1548 1552 source, revision, kind = state
1549 1553 if not revision:
1550 1554 self.remove()
1551 1555 return
1552 1556 self._fetch(source, revision)
1553 1557 # if the repo was set to be bare, unbare it
1554 1558 if self._gitisbare():
1555 1559 self._gitcommand(['config', 'core.bare', 'false'])
1556 1560 if self._gitstate() == revision:
1557 1561 self._gitcommand(['reset', '--hard', 'HEAD'])
1558 1562 return
1559 1563 elif self._gitstate() == revision:
1560 1564 if overwrite:
1561 1565 # first reset the index to unmark new files for commit, because
1562 1566 # reset --hard will otherwise throw away files added for commit,
1563 1567 # not just unmark them.
1564 1568 self._gitcommand(['reset', 'HEAD'])
1565 1569 self._gitcommand(['reset', '--hard', 'HEAD'])
1566 1570 return
1567 1571 branch2rev, rev2branch = self._gitbranchmap()
1568 1572
1569 1573 def checkout(args):
1570 1574 cmd = ['checkout']
1571 1575 if overwrite:
1572 1576 # first reset the index to unmark new files for commit, because
1573 1577 # the -f option will otherwise throw away files added for
1574 1578 # commit, not just unmark them.
1575 1579 self._gitcommand(['reset', 'HEAD'])
1576 1580 cmd.append('-f')
1577 1581 self._gitcommand(cmd + args)
1578 1582 _sanitize(self.ui, self.wvfs, '.git')
1579 1583
1580 1584 def rawcheckout():
1581 1585 # no branch to checkout, check it out with no branch
1582 1586 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1583 1587 self._relpath)
1584 1588 self.ui.warn(_('check out a git branch if you intend '
1585 1589 'to make changes\n'))
1586 1590 checkout(['-q', revision])
1587 1591
1588 1592 if revision not in rev2branch:
1589 1593 rawcheckout()
1590 1594 return
1591 1595 branches = rev2branch[revision]
1592 1596 firstlocalbranch = None
1593 1597 for b in branches:
1594 1598 if b == 'refs/heads/master':
1595 1599 # master trumps all other branches
1596 1600 checkout(['refs/heads/master'])
1597 1601 return
1598 1602 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1599 1603 firstlocalbranch = b
1600 1604 if firstlocalbranch:
1601 1605 checkout([firstlocalbranch])
1602 1606 return
1603 1607
1604 1608 tracking = self._gittracking(branch2rev.keys())
1605 1609 # choose a remote branch already tracked if possible
1606 1610 remote = branches[0]
1607 1611 if remote not in tracking:
1608 1612 for b in branches:
1609 1613 if b in tracking:
1610 1614 remote = b
1611 1615 break
1612 1616
1613 1617 if remote not in tracking:
1614 1618 # create a new local tracking branch
1615 1619 local = remote.split('/', 3)[3]
1616 1620 checkout(['-b', local, remote])
1617 1621 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1618 1622 # When updating to a tracked remote branch,
1619 1623 # if the local tracking branch is downstream of it,
1620 1624 # a normal `git pull` would have performed a "fast-forward merge"
1621 1625 # which is equivalent to updating the local branch to the remote.
1622 1626 # Since we are only looking at branching at update, we need to
1623 1627 # detect this situation and perform this action lazily.
1624 1628 if tracking[remote] != self._gitcurrentbranch():
1625 1629 checkout([tracking[remote]])
1626 1630 self._gitcommand(['merge', '--ff', remote])
1627 1631 _sanitize(self.ui, self.wvfs, '.git')
1628 1632 else:
1629 1633 # a real merge would be required, just checkout the revision
1630 1634 rawcheckout()
1631 1635
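The branch-selection logic above hinges on a single ancestor test (the _gitisancestor() helper defined earlier): a local tracking branch can be fast-forwarded to its remote exactly when the merge-base of the two is the local head. As a standalone sketch (helper name assumed, git expected on PATH):

    import subprocess

    def canfastforward(local, remote, cwd):
        # 'local' is an ancestor of 'remote' iff their merge-base is 'local'
        base = subprocess.check_output(
            ['git', 'merge-base', local, remote], cwd=cwd).strip()
        localrev = subprocess.check_output(
            ['git', 'rev-parse', local], cwd=cwd).strip()
        return base == localrev
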
1632 1636 @annotatesubrepoerror
1633 1637 def commit(self, text, user, date):
1634 1638 if self._gitmissing():
1635 1639 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1636 1640 cmd = ['commit', '-a', '-m', text]
1637 1641 env = encoding.environ.copy()
1638 1642 if user:
1639 1643 cmd += ['--author', user]
1640 1644 if date:
1641 1645 # git's date parser silently ignores when seconds < 1e9
1642 1646 # convert to ISO8601
1643 1647 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1644 1648 '%Y-%m-%dT%H:%M:%S %1%2')
1645 1649 self._gitcommand(cmd, env=env)
1646 1650 # make sure the commit worked, otherwise HEAD might not exist under
1647 1651 # certain circumstances
1648 1652 return self._gitstate()
1649 1653
1650 1654 @annotatesubrepoerror
1651 1655 def merge(self, state):
1652 1656 source, revision, kind = state
1653 1657 self._fetch(source, revision)
1654 1658 base = self._gitcommand(['merge-base', revision, self._state[1]])
1655 1659 self._gitupdatestat()
1656 1660 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1657 1661
1658 1662 def mergefunc():
1659 1663 if base == revision:
1660 1664 self.get(state) # fast forward merge
1661 1665 elif base != self._state[1]:
1662 1666 self._gitcommand(['merge', '--no-commit', revision])
1663 1667 _sanitize(self.ui, self.wvfs, '.git')
1664 1668
1665 1669 if self.dirty():
1666 1670 if self._gitstate() != revision:
1667 1671 dirty = self._gitstate() == self._state[1] or code != 0
1668 1672 if _updateprompt(self.ui, self, dirty,
1669 1673 self._state[1][:7], revision[:7]):
1670 1674 mergefunc()
1671 1675 else:
1672 1676 mergefunc()
1673 1677
1674 1678 @annotatesubrepoerror
1675 1679 def push(self, opts):
1676 1680 force = opts.get('force')
1677 1681
1678 1682 if not self._state[1]:
1679 1683 return True
1680 1684 if self._gitmissing():
1681 1685 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1682 1686 # if a branch in origin contains the revision, nothing to do
1683 1687 branch2rev, rev2branch = self._gitbranchmap()
1684 1688 if self._state[1] in rev2branch:
1685 1689 for b in rev2branch[self._state[1]]:
1686 1690 if b.startswith('refs/remotes/origin/'):
1687 1691 return True
1688 1692 for b, revision in branch2rev.iteritems():
1689 1693 if b.startswith('refs/remotes/origin/'):
1690 1694 if self._gitisancestor(self._state[1], revision):
1691 1695 return True
1692 1696 # otherwise, try to push the currently checked out branch
1693 1697 cmd = ['push']
1694 1698 if force:
1695 1699 cmd.append('--force')
1696 1700
1697 1701 current = self._gitcurrentbranch()
1698 1702 if current:
1699 1703 # determine if the current branch is even useful
1700 1704 if not self._gitisancestor(self._state[1], current):
1701 1705 self.ui.warn(_('unrelated git branch checked out '
1702 1706 'in subrepo %s\n') % self._relpath)
1703 1707 return False
1704 1708 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1705 1709 (current.split('/', 2)[2], self._relpath))
1706 1710 ret = self._gitdir(cmd + ['origin', current])
1707 1711 return ret[1] == 0
1708 1712 else:
1709 1713 self.ui.warn(_('no branch checked out in subrepo %s\n'
1710 1714 'cannot push revision %s\n') %
1711 1715 (self._relpath, self._state[1]))
1712 1716 return False
1713 1717
1714 1718 @annotatesubrepoerror
1715 1719 def add(self, ui, match, prefix, explicitonly, **opts):
1716 1720 if self._gitmissing():
1717 1721 return []
1718 1722
1719 1723 (modified, added, removed,
1720 1724 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1721 1725 clean=True)
1722 1726
1723 1727 tracked = set()
1724 1728 # dirstates 'amn' warn, 'r' is added again
1725 1729 for l in (modified, added, deleted, clean):
1726 1730 tracked.update(l)
1727 1731
1728 1732 # Unknown files not of interest will be rejected by the matcher
1729 1733 files = unknown
1730 1734 files.extend(match.files())
1731 1735
1732 1736 rejected = []
1733 1737
1734 1738 files = [f for f in sorted(set(files)) if match(f)]
1735 1739 for f in files:
1736 1740 exact = match.exact(f)
1737 1741 command = ["add"]
1738 1742 if exact:
1739 1743 command.append("-f") #should be added, even if ignored
1740 1744 if ui.verbose or not exact:
1741 1745 ui.status(_('adding %s\n') % match.rel(f))
1742 1746
1743 1747 if f in tracked: # hg prints 'adding' even if already tracked
1744 1748 if exact:
1745 1749 rejected.append(f)
1746 1750 continue
1747 1751 if not opts.get('dry_run'):
1748 1752 self._gitcommand(command + [f])
1749 1753
1750 1754 for f in rejected:
1751 1755 ui.warn(_("%s already tracked!\n") % match.abs(f))
1752 1756
1753 1757 return rejected
1754 1758
1755 1759 @annotatesubrepoerror
1756 1760 def remove(self):
1757 1761 if self._gitmissing():
1758 1762 return
1759 1763 if self.dirty():
1760 1764 self.ui.warn(_('not removing repo %s because '
1761 1765 'it has changes.\n') % self._relpath)
1762 1766 return
1763 1767 # we can't fully delete the repository as it may contain
1764 1768 # local-only history
1765 1769 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1766 1770 self._gitcommand(['config', 'core.bare', 'true'])
1767 1771 for f, kind in self.wvfs.readdir():
1768 1772 if f == '.git':
1769 1773 continue
1770 1774 if kind == stat.S_IFDIR:
1771 1775 self.wvfs.rmtree(f)
1772 1776 else:
1773 1777 self.wvfs.unlink(f)
1774 1778
1775 def archive(self, archiver, prefix, match=None):
1779 def archive(self, archiver, prefix, match=None, decode=True):
1776 1780 total = 0
1777 1781 source, revision = self._state
1778 1782 if not revision:
1779 1783 return total
1780 1784 self._fetch(source, revision)
1781 1785
1782 1786 # Use git's native archive command and parse its tar output.
1783 1787 # This should be much faster than manually traversing the trees
1784 1788 # and objects with many subprocess calls.
1785 1789 tarstream = self._gitcommand(['archive', revision], stream=True)
1786 1790 tar = tarfile.open(fileobj=tarstream, mode='r|')
1787 1791 relpath = subrelpath(self)
1788 1792 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1789 1793 for i, info in enumerate(tar):
1790 1794 if info.isdir():
1791 1795 continue
1792 1796 if match and not match(info.name):
1793 1797 continue
1794 1798 if info.issym():
1795 1799 data = info.linkname
1796 1800 else:
1797 1801 data = tar.extractfile(info).read()
1798 1802 archiver.addfile(prefix + self._path + '/' + info.name,
1799 1803 info.mode, info.issym(), data)
1800 1804 total += 1
1801 1805 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1802 1806 unit=_('files'))
1803 1807 self.ui.progress(_('archiving (%s)') % relpath, None)
1804 1808 return total
1805 1809
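The archive path above streams the output of 'git archive' straight into tarfile rather than issuing one subprocess call per object; reduced to a standalone sketch (the function name and the yielded tuple shape are illustrative only):

    import subprocess
    import tarfile

    def iterarchive(revision, cwd):
        # open git's tar stream in non-seekable stream mode ('r|')
        p = subprocess.Popen(['git', 'archive', revision], cwd=cwd,
                             stdout=subprocess.PIPE)
        tar = tarfile.open(fileobj=p.stdout, mode='r|')
        for info in tar:
            if info.isdir():
                continue
            if info.issym():
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            yield info.name, info.mode, info.issym(), data
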
1806 1810
1807 1811 @annotatesubrepoerror
1808 1812 def cat(self, match, prefix, **opts):
1809 1813 rev = self._state[1]
1810 1814 if match.anypats():
1811 1815 return 1 #No support for include/exclude yet
1812 1816
1813 1817 if not match.files():
1814 1818 return 1
1815 1819
1816 1820 for f in match.files():
1817 1821 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1818 1822 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1819 1823 self._ctx.node(),
1820 1824 pathname=self.wvfs.reljoin(prefix, f))
1821 1825 fp.write(output)
1822 1826 fp.close()
1823 1827 return 0
1824 1828
1825 1829
1826 1830 @annotatesubrepoerror
1827 1831 def status(self, rev2, **opts):
1828 1832 rev1 = self._state[1]
1829 1833 if self._gitmissing() or not rev1:
1830 1834 # if the repo is missing, return no results
1831 1835 return scmutil.status([], [], [], [], [], [], [])
1832 1836 modified, added, removed = [], [], []
1833 1837 self._gitupdatestat()
1834 1838 if rev2:
1835 1839 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1836 1840 else:
1837 1841 command = ['diff-index', '--no-renames', rev1]
1838 1842 out = self._gitcommand(command)
1839 1843 for line in out.split('\n'):
1840 1844 tab = line.find('\t')
1841 1845 if tab == -1:
1842 1846 continue
1843 1847 status, f = line[tab - 1], line[tab + 1:]
1844 1848 if status == 'M':
1845 1849 modified.append(f)
1846 1850 elif status == 'A':
1847 1851 added.append(f)
1848 1852 elif status == 'D':
1849 1853 removed.append(f)
1850 1854
1851 1855 deleted, unknown, ignored, clean = [], [], [], []
1852 1856
1853 1857 command = ['status', '--porcelain', '-z']
1854 1858 if opts.get('unknown'):
1855 1859 command += ['--untracked-files=all']
1856 1860 if opts.get('ignored'):
1857 1861 command += ['--ignored']
1858 1862 out = self._gitcommand(command)
1859 1863
1860 1864 changedfiles = set()
1861 1865 changedfiles.update(modified)
1862 1866 changedfiles.update(added)
1863 1867 changedfiles.update(removed)
1864 1868 for line in out.split('\0'):
1865 1869 if not line:
1866 1870 continue
1867 1871 st = line[0:2]
1868 1872 #moves and copies show 2 files on one line
1869 1873 if line.find('\0') >= 0:
1870 1874 filename1, filename2 = line[3:].split('\0')
1871 1875 else:
1872 1876 filename1 = line[3:]
1873 1877 filename2 = None
1874 1878
1875 1879 changedfiles.add(filename1)
1876 1880 if filename2:
1877 1881 changedfiles.add(filename2)
1878 1882
1879 1883 if st == '??':
1880 1884 unknown.append(filename1)
1881 1885 elif st == '!!':
1882 1886 ignored.append(filename1)
1883 1887
1884 1888 if opts.get('clean'):
1885 1889 out = self._gitcommand(['ls-files'])
1886 1890 for f in out.split('\n'):
1887 1891 if not f in changedfiles:
1888 1892 clean.append(f)
1889 1893
1890 1894 return scmutil.status(modified, added, removed, deleted,
1891 1895 unknown, ignored, clean)
1892 1896
1893 1897 @annotatesubrepoerror
1894 1898 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1895 1899 node1 = self._state[1]
1896 1900 cmd = ['diff', '--no-renames']
1897 1901 if opts['stat']:
1898 1902 cmd.append('--stat')
1899 1903 else:
1900 1904 # for Git, this also implies '-p'
1901 1905 cmd.append('-U%d' % diffopts.context)
1902 1906
1903 1907 gitprefix = self.wvfs.reljoin(prefix, self._path)
1904 1908
1905 1909 if diffopts.noprefix:
1906 1910 cmd.extend(['--src-prefix=%s/' % gitprefix,
1907 1911 '--dst-prefix=%s/' % gitprefix])
1908 1912 else:
1909 1913 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1910 1914 '--dst-prefix=b/%s/' % gitprefix])
1911 1915
1912 1916 if diffopts.ignorews:
1913 1917 cmd.append('--ignore-all-space')
1914 1918 if diffopts.ignorewsamount:
1915 1919 cmd.append('--ignore-space-change')
1916 1920 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1917 1921 and diffopts.ignoreblanklines:
1918 1922 cmd.append('--ignore-blank-lines')
1919 1923
1920 1924 cmd.append(node1)
1921 1925 if node2:
1922 1926 cmd.append(node2)
1923 1927
1924 1928 output = ""
1925 1929 if match.always():
1926 1930 output += self._gitcommand(cmd) + '\n'
1927 1931 else:
1928 1932 st = self.status(node2)[:3]
1929 1933 files = [f for sublist in st for f in sublist]
1930 1934 for f in files:
1931 1935 if match(f):
1932 1936 output += self._gitcommand(cmd + ['--', f]) + '\n'
1933 1937
1934 1938 if output.strip():
1935 1939 ui.write(output)
1936 1940
1937 1941 @annotatesubrepoerror
1938 1942 def revert(self, substate, *pats, **opts):
1939 1943 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1940 1944 if not opts.get('no_backup'):
1941 1945 status = self.status(None)
1942 1946 names = status.modified
1943 1947 for name in names:
1944 1948 bakname = scmutil.origpath(self.ui, self._subparent, name)
1945 1949 self.ui.note(_('saving current version of %s as %s\n') %
1946 1950 (name, bakname))
1947 1951 self.wvfs.rename(name, bakname)
1948 1952
1949 1953 if not opts.get('dry_run'):
1950 1954 self.get(substate, overwrite=True)
1951 1955 return []
1952 1956
1953 1957 def shortid(self, revid):
1954 1958 return revid[:7]
1955 1959
1956 1960 types = {
1957 1961 'hg': hgsubrepo,
1958 1962 'svn': svnsubrepo,
1959 1963 'git': gitsubrepo,
1960 1964 }
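The types table at the end of the module is a plain dispatch map from the subrepo kind recorded in .hgsubstate ('hg', 'svn' or 'git') to the class that handles it. A minimal sketch of how such a lookup might be wired (the helper name is hypothetical; it reuses the module's error and _ imports):

    def makesubrepo(ctx, path, allowcreate=True):
        # ctx.substate maps subrepo path -> (source, revision, kind)
        source, revision, kind = ctx.substate[path]
        try:
            cls = types[kind]
        except KeyError:
            raise error.Abort(_('unknown subrepo type %s') % kind)
        return cls(ctx, path, (source, revision), allowcreate)
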
@@ -1,545 +1,561 b''
1 1 Test EOL extension
2 2
3 3 $ cat >> $HGRCPATH <<EOF
4 4 > [diff]
5 5 > git = True
6 6 > EOF
7 7
8 8 Set up helpers
9 9
10 10 $ cat > switch-eol.py <<EOF
11 11 > import sys
12 12 > try:
13 13 > import os, msvcrt
14 14 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
15 15 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
16 16 > except ImportError:
17 17 > pass
18 18 > (old, new) = sys.argv[1] == 'LF' and ('\n', '\r\n') or ('\r\n', '\n')
19 19 > print "%% switching encoding from %r to %r" % (old, new)
20 20 > for path in sys.argv[2:]:
21 21 > data = file(path, 'rb').read()
22 22 > data = data.replace(old, new)
23 23 > file(path, 'wb').write(data)
24 24 > EOF
25 25
26 26 $ seteol () {
27 27 > if [ $1 = "LF" ]; then
28 28 > EOL='\n'
29 29 > else
30 30 > EOL='\r\n'
31 31 > fi
32 32 > }
33 33
34 34 $ makerepo () {
35 35 > seteol $1
36 36 > echo "% setup $1 repository"
37 37 > hg init repo
38 38 > cd repo
39 39 > cat > .hgeol <<EOF
40 40 > [repository]
41 41 > native = $1
42 42 > [patterns]
43 43 > mixed.txt = BIN
44 44 > **.txt = native
45 45 > EOF
46 46 > printf "first${EOL}second${EOL}third${EOL}" > a.txt
47 47 > hg commit --addremove -m 'checkin'
48 48 > echo
49 49 > cd ..
50 50 > }
51 51
52 52 $ dotest () {
53 53 > seteol $1
54 54 > echo "% hg clone repo repo-$1"
55 55 > hg clone --noupdate repo repo-$1
56 56 > cd repo-$1
57 57 > cat > .hg/hgrc <<EOF
58 58 > [extensions]
59 59 > eol =
60 60 > [eol]
61 61 > native = $1
62 62 > EOF
63 63 > hg update
64 64 > echo '% a.txt'
65 65 > cat a.txt
66 66 > echo '% hg cat a.txt'
67 67 > hg cat a.txt
68 68 > printf "fourth${EOL}" >> a.txt
69 69 > echo '% a.txt'
70 70 > cat a.txt
71 71 > hg diff
72 72 > python ../switch-eol.py $1 a.txt
73 73 > echo '% hg diff only reports a single changed line:'
74 74 > hg diff
75 75 > echo "% reverting back to $1 format"
76 76 > hg revert a.txt
77 77 > cat a.txt
78 78 > printf "first\r\nsecond\n" > mixed.txt
79 79 > hg add mixed.txt
80 80 > echo "% hg commit of inconsistent .txt file marked as binary (should work)"
81 81 > hg commit -m 'binary file'
82 82 > echo "% hg commit of inconsistent .txt file marked as native (should fail)"
83 83 > printf "first\nsecond\r\nthird\nfourth\r\n" > a.txt
84 84 > hg commit -m 'inconsistent file'
85 85 > echo "% hg commit --config eol.only-consistent=False (should work)"
86 86 > hg commit --config eol.only-consistent=False -m 'inconsistent file'
87 87 > echo "% hg commit of binary .txt file marked as native (binary files always okay)"
88 88 > printf "first${EOL}\0${EOL}third${EOL}" > a.txt
89 89 > hg commit -m 'binary file'
90 90 > cd ..
91 91 > rm -r repo-$1
92 92 > }
93 93
94 94 $ makemixedrepo () {
95 95 > echo
96 96 > echo "# setup $1 repository"
97 97 > hg init mixed
98 98 > cd mixed
99 99 > printf "foo\r\nbar\r\nbaz\r\n" > win.txt
100 100 > printf "foo\nbar\nbaz\n" > unix.txt
101 101 > #printf "foo\r\nbar\nbaz\r\n" > mixed.txt
102 102 > hg commit --addremove -m 'created mixed files'
103 103 > echo "# setting repository-native EOLs to $1"
104 104 > cat > .hgeol <<EOF
105 105 > [repository]
106 106 > native = $1
107 107 > [patterns]
108 108 > **.txt = native
109 109 > EOF
110 110 > hg commit --addremove -m 'added .hgeol'
111 111 > cd ..
112 112 > }
113 113
114 114 $ testmixed () {
115 115 > echo
116 116 > echo "% hg clone mixed mixed-$1"
117 117 > hg clone mixed mixed-$1
118 118 > cd mixed-$1
119 119 > echo '% hg status (eol extension not yet activated)'
120 120 > hg status
121 121 > cat > .hg/hgrc <<EOF
122 122 > [extensions]
123 123 > eol =
124 124 > [eol]
125 125 > native = $1
126 126 > EOF
127 127 > echo '% hg status (eol activated)'
128 128 > hg status
129 129 > echo '% hg commit'
130 130 > hg commit -m 'synchronized EOLs'
131 131 > echo '% hg status'
132 132 > hg status
133 133 > cd ..
134 134 > rm -r mixed-$1
135 135 > }
136 136
137 137 Basic tests
138 138
139 139 $ makerepo LF
140 140 % setup LF repository
141 141 adding .hgeol
142 142 adding a.txt
143 143
144 144 $ dotest LF
145 145 % hg clone repo repo-LF
146 146 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
147 147 % a.txt
148 148 first
149 149 second
150 150 third
151 151 % hg cat a.txt
152 152 first
153 153 second
154 154 third
155 155 % a.txt
156 156 first
157 157 second
158 158 third
159 159 fourth
160 160 diff --git a/a.txt b/a.txt
161 161 --- a/a.txt
162 162 +++ b/a.txt
163 163 @@ -1,3 +1,4 @@
164 164 first
165 165 second
166 166 third
167 167 +fourth
168 168 % switching encoding from '\n' to '\r\n'
169 169 % hg diff only reports a single changed line:
170 170 diff --git a/a.txt b/a.txt
171 171 --- a/a.txt
172 172 +++ b/a.txt
173 173 @@ -1,3 +1,4 @@
174 174 first
175 175 second
176 176 third
177 177 +fourth
178 178 % reverting back to LF format
179 179 first
180 180 second
181 181 third
182 182 % hg commit of inconsistent .txt file marked as binary (should work)
183 183 % hg commit of inconsistent .txt file marked as native (should fail)
184 184 abort: inconsistent newline style in a.txt
185 185
186 186 % hg commit --config eol.only-consistent=False (should work)
187 187 % hg commit of binary .txt file marked as native (binary files always okay)
188 188 $ dotest CRLF
189 189 % hg clone repo repo-CRLF
190 190 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
191 191 % a.txt
192 192 first\r (esc)
193 193 second\r (esc)
194 194 third\r (esc)
195 195 % hg cat a.txt
196 196 first
197 197 second
198 198 third
199 199 % a.txt
200 200 first\r (esc)
201 201 second\r (esc)
202 202 third\r (esc)
203 203 fourth\r (esc)
204 204 diff --git a/a.txt b/a.txt
205 205 --- a/a.txt
206 206 +++ b/a.txt
207 207 @@ -1,3 +1,4 @@
208 208 first
209 209 second
210 210 third
211 211 +fourth
212 212 % switching encoding from '\r\n' to '\n'
213 213 % hg diff only reports a single changed line:
214 214 diff --git a/a.txt b/a.txt
215 215 --- a/a.txt
216 216 +++ b/a.txt
217 217 @@ -1,3 +1,4 @@
218 218 first
219 219 second
220 220 third
221 221 +fourth
222 222 % reverting back to CRLF format
223 223 first\r (esc)
224 224 second\r (esc)
225 225 third\r (esc)
226 226 % hg commit of inconsistent .txt file marked as binary (should work)
227 227 % hg commit of inconsistent .txt file marked as native (should fail)
228 228 abort: inconsistent newline style in a.txt
229 229
230 230 % hg commit --config eol.only-consistent=False (should work)
231 231 % hg commit of binary .txt file marked as native (binary files always okay)
232 232 $ rm -r repo
233 233 $ makerepo CRLF
234 234 % setup CRLF repository
235 235 adding .hgeol
236 236 adding a.txt
237 237
238 238 $ dotest LF
239 239 % hg clone repo repo-LF
240 240 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
241 241 % a.txt
242 242 first
243 243 second
244 244 third
245 245 % hg cat a.txt
246 246 first\r (esc)
247 247 second\r (esc)
248 248 third\r (esc)
249 249 % a.txt
250 250 first
251 251 second
252 252 third
253 253 fourth
254 254 diff --git a/a.txt b/a.txt
255 255 --- a/a.txt
256 256 +++ b/a.txt
257 257 @@ -1,3 +1,4 @@
258 258 first\r (esc)
259 259 second\r (esc)
260 260 third\r (esc)
261 261 +fourth\r (esc)
262 262 % switching encoding from '\n' to '\r\n'
263 263 % hg diff only reports a single changed line:
264 264 diff --git a/a.txt b/a.txt
265 265 --- a/a.txt
266 266 +++ b/a.txt
267 267 @@ -1,3 +1,4 @@
268 268 first\r (esc)
269 269 second\r (esc)
270 270 third\r (esc)
271 271 +fourth\r (esc)
272 272 % reverting back to LF format
273 273 first
274 274 second
275 275 third
276 276 % hg commit of inconsistent .txt file marked as binary (should work)
277 277 % hg commit of inconsistent .txt file marked as native (should fail)
278 278 abort: inconsistent newline style in a.txt
279 279
280 280 % hg commit --config eol.only-consistent=False (should work)
281 281 % hg commit of binary .txt file marked as native (binary files always okay)
282 282 $ dotest CRLF
283 283 % hg clone repo repo-CRLF
284 284 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
285 285 % a.txt
286 286 first\r (esc)
287 287 second\r (esc)
288 288 third\r (esc)
289 289 % hg cat a.txt
290 290 first\r (esc)
291 291 second\r (esc)
292 292 third\r (esc)
293 293 % a.txt
294 294 first\r (esc)
295 295 second\r (esc)
296 296 third\r (esc)
297 297 fourth\r (esc)
298 298 diff --git a/a.txt b/a.txt
299 299 --- a/a.txt
300 300 +++ b/a.txt
301 301 @@ -1,3 +1,4 @@
302 302 first\r (esc)
303 303 second\r (esc)
304 304 third\r (esc)
305 305 +fourth\r (esc)
306 306 % switching encoding from '\r\n' to '\n'
307 307 % hg diff only reports a single changed line:
308 308 diff --git a/a.txt b/a.txt
309 309 --- a/a.txt
310 310 +++ b/a.txt
311 311 @@ -1,3 +1,4 @@
312 312 first\r (esc)
313 313 second\r (esc)
314 314 third\r (esc)
315 315 +fourth\r (esc)
316 316 % reverting back to CRLF format
317 317 first\r (esc)
318 318 second\r (esc)
319 319 third\r (esc)
320 320 % hg commit of inconsistent .txt file marked as binary (should work)
321 321 % hg commit of inconsistent .txt file marked as native (should fail)
322 322 abort: inconsistent newline style in a.txt
323 323
324 324 % hg commit --config eol.only-consistent=False (should work)
325 325 % hg commit of binary .txt file marked as native (binary files always okay)
326 326 $ rm -r repo
327 327
328 328 Mixed tests
329 329
330 330 $ makemixedrepo LF
331 331
332 332 # setup LF repository
333 333 adding unix.txt
334 334 adding win.txt
335 335 # setting repository-native EOLs to LF
336 336 adding .hgeol
337 337 $ testmixed LF
338 338
339 339 % hg clone mixed mixed-LF
340 340 updating to branch default
341 341 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
342 342 % hg status (eol extension not yet activated)
343 343 % hg status (eol activated)
344 344 M win.txt
345 345 % hg commit
346 346 % hg status
347 347 $ testmixed CRLF
348 348
349 349 % hg clone mixed mixed-CRLF
350 350 updating to branch default
351 351 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
352 352 % hg status (eol extension not yet activated)
353 353 % hg status (eol activated)
354 354 M win.txt
355 355 % hg commit
356 356 % hg status
357 357 $ rm -r mixed
358 358 $ makemixedrepo CRLF
359 359
360 360 # setup CRLF repository
361 361 adding unix.txt
362 362 adding win.txt
363 363 # setting repository-native EOLs to CRLF
364 364 adding .hgeol
365 365 $ testmixed LF
366 366
367 367 % hg clone mixed mixed-LF
368 368 updating to branch default
369 369 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 370 % hg status (eol extension not yet activated)
371 371 % hg status (eol activated)
372 372 M unix.txt
373 373 % hg commit
374 374 % hg status
375 375 $ testmixed CRLF
376 376
377 377 % hg clone mixed mixed-CRLF
378 378 updating to branch default
379 379 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
380 380 % hg status (eol extension not yet activated)
381 381 % hg status (eol activated)
382 382 M unix.txt
383 383 % hg commit
384 384 % hg status
385 385 $ rm -r mixed
386 386
387 387 $ echo '[extensions]' >> $HGRCPATH
388 388 $ echo 'eol =' >> $HGRCPATH
389 389
390 390 #if unix-permissions
391 391
392 392 Test issue2569 -- eol extension takes write lock on reading:
393 393
394 394 $ hg init repo
395 395 $ cd repo
396 396 $ touch .hgeol
397 397 $ hg status
398 398 ? .hgeol
399 399 $ chmod -R -w .hg
400 400 $ sleep 1
401 401 $ touch .hgeol
402 402 $ hg status --traceback
403 403 ? .hgeol
404 404 $ chmod -R u+w .hg
405 405 $ cd ..
406 406
407 407 #endif
408 408
409 409 Test cleverencode: and cleverdecode: aliases for win32text extension
410 410
411 411 $ cat <<EOF >> $HGRCPATH
412 412 > [encode]
413 413 > **.txt = cleverencode:
414 414 > [decode]
415 415 > **.txt = cleverdecode:
416 416 > EOF
417 417
418 418 $ hg init win32compat
419 419 $ cd win32compat
420 420 $ printf "foo\r\nbar\r\nbaz\r\n" > win.txt
421 421 $ printf "foo\nbar\nbaz\n" > unix.txt
422 422 $ hg add
423 423 adding unix.txt
424 424 adding win.txt
425 425 $ hg commit -m checkin
426 426
427 427 Check that both files have LF line-endings in the repository:
428 428
429 429 $ hg cat win.txt
430 430 foo
431 431 bar
432 432 baz
433 433 $ hg cat unix.txt
434 434 foo
435 435 bar
436 436 baz
437 437
438 438 Test handling of a broken .hgeol file:
439 439
440 440 $ touch .hgeol
441 441 $ hg add .hgeol
442 442 $ hg commit -m 'clean version'
443 443 $ echo "bad" > .hgeol
444 444 $ hg status
445 445 warning: ignoring .hgeol file due to parse error at .hgeol:1: bad
446 446 M .hgeol
447 447 $ hg revert .hgeol
448 448 warning: ignoring .hgeol file due to parse error at .hgeol:1: bad
449 449 $ hg status
450 450 ? .hgeol.orig
451 451
452 452 Test eol.only-consistent can be specified in .hgeol
453 453
454 454 $ cd $TESTTMP
455 455 $ hg init only-consistent
456 456 $ cd only-consistent
457 457 $ printf "first\nsecond\r\n" > a.txt
458 458 $ hg add a.txt
459 459 $ cat > .hgeol << EOF
460 460 > [eol]
461 461 > only-consistent = True
462 462 > EOF
463 463 $ hg commit -m 'inconsistent'
464 464 abort: inconsistent newline style in a.txt
465 465
466 466 [255]
467 467 $ cat > .hgeol << EOF
468 468 > [eol]
469 469 > only-consistent = False
470 470 > EOF
471 471 $ hg commit -m 'consistent'
472 472
473 $ hg init subrepo
474 $ hg -R subrepo pull -qu .
475 $ echo "subrepo = subrepo" > .hgsub
476 $ hg ci -Am "add subrepo"
477 adding .hgeol
478 adding .hgsub
479 $ hg archive -S ../archive
480 $ find ../archive/* | sort
481 ../archive/a.txt
482 ../archive/subrepo
483 ../archive/subrepo/a.txt
484 $ cat ../archive/a.txt ../archive/subrepo/a.txt
485 first\r (esc)
486 second\r (esc)
487 first\r (esc)
488 second\r (esc)
473 489
474 490 Test trailing newline
475 491
476 492 $ cat >> $HGRCPATH <<EOF
477 493 > [extensions]
478 494 > eol=
479 495 > EOF
480 496
481 497 setup repository
482 498
483 499 $ cd $TESTTMP
484 500 $ hg init trailing
485 501 $ cd trailing
486 502 $ cat > .hgeol <<EOF
487 503 > [patterns]
488 504 > **.txt = native
489 505 > [eol]
490 506 > fix-trailing-newline = False
491 507 > EOF
492 508
493 509 add text without trailing newline
494 510
495 511 $ printf "first\nsecond" > a.txt
496 512 $ hg commit --addremove -m 'checking in'
497 513 adding .hgeol
498 514 adding a.txt
499 515 $ rm a.txt
500 516 $ hg update -C -q
501 517 $ cat a.txt
502 518 first
503 519 second (no-eol)
504 520
505 521 $ cat > .hgeol <<EOF
506 522 > [patterns]
507 523 > **.txt = native
508 524 > [eol]
509 525 > fix-trailing-newline = True
510 526 > EOF
511 527 $ printf "third\nfourth" > a.txt
512 528 $ hg commit -m 'checking in with newline fix'
513 529 $ rm a.txt
514 530 $ hg update -C -q
515 531 $ cat a.txt
516 532 third
517 533 fourth
518 534
519 535 append a line without trailing newline
520 536
521 537 $ printf "fifth" >> a.txt
522 538 $ hg commit -m 'adding another line'
523 539 $ rm a.txt
524 540 $ hg update -C -q
525 541 $ cat a.txt
526 542 third
527 543 fourth
528 544 fifth
529 545
530 546 amend of changesets with renamed/deleted files expose new code paths
531 547
532 548 $ hg mv a.txt b.txt
533 549 $ hg ci --amend -q
534 550 $ hg diff -c.
535 551 diff --git a/a.txt b/b.txt
536 552 rename from a.txt
537 553 rename to b.txt
538 554 --- a/a.txt
539 555 +++ b/b.txt
540 556 @@ -1,2 +1,3 @@
541 557 third
542 558 fourth
543 559 +fifth
544 560
545 561 $ cd ..