ui: merge prompt text components into a single string...
Matt Mackall -
r19226:c58b6ab4 default
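The change is easiest to see in isolation: the separate tuple of '&'-marked choice labels is folded into the prompt string itself, with the choices appended after '$$' separators, and ui.promptchoice() loses its choices argument. A rough before/after sketch based on the hunks below (repo, lfile and _ are names from the surrounding module, shown here only for context):

    # before: message and choices passed to ui.promptchoice() separately
    choices = (_('&Largefile'), _('&Normal file'))
    msg = _('%s has been turned into a largefile\n'
            'use (l)argefile or keep as (n)ormal file?') % lfile
    if repo.ui.promptchoice(msg, choices, 0) == 0:
        pass  # largefile version kept

    # after: the choices ride along in the prompt string, separated by '$$'
    msg = _('%s has been turned into a largefile\n'
            'use (l)argefile or keep as (n)ormal file?'
            '$$ &Largefile $$ &Normal file') % lfile
    if repo.ui.promptchoice(msg, 0) == 0:
        pass  # largefile version kept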
@@ -1,1214 +1,1214 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10
11 11 import os
12 12 import copy
13 13
14 14 from mercurial import hg, commands, util, cmdutil, scmutil, match as match_, \
15 15 node, archival, error, merge, discovery
16 16 from mercurial.i18n import _
17 17 from mercurial.node import hex
18 18 from hgext import rebase
19 19
20 20 import lfutil
21 21 import lfcommands
22 22 import basestore
23 23
24 24 # -- Utility functions: commonly/repeatedly needed functionality ---------------
25 25
26 26 def installnormalfilesmatchfn(manifest):
27 27 '''overrides scmutil.match so that the matcher it returns will ignore all
28 28 largefiles'''
29 29 oldmatch = None # for the closure
30 30 def overridematch(ctx, pats=[], opts={}, globbed=False,
31 31 default='relpath'):
32 32 match = oldmatch(ctx, pats, opts, globbed, default)
33 33 m = copy.copy(match)
34 34 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
35 35 manifest)
36 36 m._files = filter(notlfile, m._files)
37 37 m._fmap = set(m._files)
38 38 m._always = False
39 39 origmatchfn = m.matchfn
40 40 m.matchfn = lambda f: notlfile(f) and origmatchfn(f) or None
41 41 return m
42 42 oldmatch = installmatchfn(overridematch)
43 43
44 44 def installmatchfn(f):
45 45 oldmatch = scmutil.match
46 46 setattr(f, 'oldmatch', oldmatch)
47 47 scmutil.match = f
48 48 return oldmatch
49 49
50 50 def restorematchfn():
51 51 '''restores scmutil.match to what it was before installnormalfilesmatchfn
52 52 was called. no-op if scmutil.match is its original function.
53 53
54 54 Note that n calls to installnormalfilesmatchfn will require n calls to
55 55 restore matchfn to reverse'''
56 56 scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
57 57
58 58 def addlargefiles(ui, repo, *pats, **opts):
59 59 large = opts.pop('large', None)
60 60 lfsize = lfutil.getminsize(
61 61 ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
62 62
63 63 lfmatcher = None
64 64 if lfutil.islfilesrepo(repo):
65 65 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
66 66 if lfpats:
67 67 lfmatcher = match_.match(repo.root, '', list(lfpats))
68 68
69 69 lfnames = []
70 70 m = scmutil.match(repo[None], pats, opts)
71 71 m.bad = lambda x, y: None
72 72 wctx = repo[None]
73 73 for f in repo.walk(m):
74 74 exact = m.exact(f)
75 75 lfile = lfutil.standin(f) in wctx
76 76 nfile = f in wctx
77 77 exists = lfile or nfile
78 78
79 79 # Don't warn the user when they attempt to add a normal tracked file.
80 80 # The normal add code will do that for us.
81 81 if exact and exists:
82 82 if lfile:
83 83 ui.warn(_('%s already a largefile\n') % f)
84 84 continue
85 85
86 86 if (exact or not exists) and not lfutil.isstandin(f):
87 87 wfile = repo.wjoin(f)
88 88
89 89 # In case the file was removed previously, but not committed
90 90 # (issue3507)
91 91 if not os.path.exists(wfile):
92 92 continue
93 93
94 94 abovemin = (lfsize and
95 95 os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
96 96 if large or abovemin or (lfmatcher and lfmatcher(f)):
97 97 lfnames.append(f)
98 98 if ui.verbose or not exact:
99 99 ui.status(_('adding %s as a largefile\n') % m.rel(f))
100 100
101 101 bad = []
102 102 standins = []
103 103
104 104 # Need to lock, otherwise there could be a race condition between
105 105 # when standins are created and added to the repo.
106 106 wlock = repo.wlock()
107 107 try:
108 108 if not opts.get('dry_run'):
109 109 lfdirstate = lfutil.openlfdirstate(ui, repo)
110 110 for f in lfnames:
111 111 standinname = lfutil.standin(f)
112 112 lfutil.writestandin(repo, standinname, hash='',
113 113 executable=lfutil.getexecutable(repo.wjoin(f)))
114 114 standins.append(standinname)
115 115 if lfdirstate[f] == 'r':
116 116 lfdirstate.normallookup(f)
117 117 else:
118 118 lfdirstate.add(f)
119 119 lfdirstate.write()
120 120 bad += [lfutil.splitstandin(f)
121 121 for f in repo[None].add(standins)
122 122 if f in m.files()]
123 123 finally:
124 124 wlock.release()
125 125 return bad
126 126
127 127 def removelargefiles(ui, repo, *pats, **opts):
128 128 after = opts.get('after')
129 129 if not pats and not after:
130 130 raise util.Abort(_('no files specified'))
131 131 m = scmutil.match(repo[None], pats, opts)
132 132 try:
133 133 repo.lfstatus = True
134 134 s = repo.status(match=m, clean=True)
135 135 finally:
136 136 repo.lfstatus = False
137 137 manifest = repo[None].manifest()
138 138 modified, added, deleted, clean = [[f for f in list
139 139 if lfutil.standin(f) in manifest]
140 140 for list in [s[0], s[1], s[3], s[6]]]
141 141
142 142 def warn(files, msg):
143 143 for f in files:
144 144 ui.warn(msg % m.rel(f))
145 145 return int(len(files) > 0)
146 146
147 147 result = 0
148 148
149 149 if after:
150 150 remove, forget = deleted, []
151 151 result = warn(modified + added + clean,
152 152 _('not removing %s: file still exists\n'))
153 153 else:
154 154 remove, forget = deleted + clean, []
155 155 result = warn(modified, _('not removing %s: file is modified (use -f'
156 156 ' to force removal)\n'))
157 157 result = warn(added, _('not removing %s: file has been marked for add'
158 158 ' (use forget to undo)\n')) or result
159 159
160 160 for f in sorted(remove + forget):
161 161 if ui.verbose or not m.exact(f):
162 162 ui.status(_('removing %s\n') % m.rel(f))
163 163
164 164 # Need to lock because standin files are deleted then removed from the
165 165 # repository and we could race in-between.
166 166 wlock = repo.wlock()
167 167 try:
168 168 lfdirstate = lfutil.openlfdirstate(ui, repo)
169 169 for f in remove:
170 170 if not after:
171 171 # If this is being called by addremove, notify the user that we
172 172 # are removing the file.
173 173 if getattr(repo, "_isaddremove", False):
174 174 ui.status(_('removing %s\n') % f)
175 175 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
176 176 lfdirstate.remove(f)
177 177 lfdirstate.write()
178 178 forget = [lfutil.standin(f) for f in forget]
179 179 remove = [lfutil.standin(f) for f in remove]
180 180 repo[None].forget(forget)
181 181 # If this is being called by addremove, let the original addremove
182 182 # function handle this.
183 183 if not getattr(repo, "_isaddremove", False):
184 184 for f in remove:
185 185 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
186 186 repo[None].forget(remove)
187 187 finally:
188 188 wlock.release()
189 189
190 190 return result
191 191
192 192 # For overriding mercurial.hgweb.webcommands so that largefiles will
193 193 # appear at their right place in the manifests.
194 194 def decodepath(orig, path):
195 195 return lfutil.splitstandin(path) or path
196 196
197 197 # -- Wrappers: modify existing commands --------------------------------
198 198
199 199 # Add works by going through the files that the user wanted to add and
200 200 # checking if they should be added as largefiles. Then it makes a new
201 201 # matcher which matches only the normal files and runs the original
202 202 # version of add.
203 203 def overrideadd(orig, ui, repo, *pats, **opts):
204 204 normal = opts.pop('normal')
205 205 if normal:
206 206 if opts.get('large'):
207 207 raise util.Abort(_('--normal cannot be used with --large'))
208 208 return orig(ui, repo, *pats, **opts)
209 209 bad = addlargefiles(ui, repo, *pats, **opts)
210 210 installnormalfilesmatchfn(repo[None].manifest())
211 211 result = orig(ui, repo, *pats, **opts)
212 212 restorematchfn()
213 213
214 214 return (result == 1 or bad) and 1 or 0
215 215
216 216 def overrideremove(orig, ui, repo, *pats, **opts):
217 217 installnormalfilesmatchfn(repo[None].manifest())
218 218 result = orig(ui, repo, *pats, **opts)
219 219 restorematchfn()
220 220 return removelargefiles(ui, repo, *pats, **opts) or result
221 221
222 222 def overridestatusfn(orig, repo, rev2, **opts):
223 223 try:
224 224 repo._repo.lfstatus = True
225 225 return orig(repo, rev2, **opts)
226 226 finally:
227 227 repo._repo.lfstatus = False
228 228
229 229 def overridestatus(orig, ui, repo, *pats, **opts):
230 230 try:
231 231 repo.lfstatus = True
232 232 return orig(ui, repo, *pats, **opts)
233 233 finally:
234 234 repo.lfstatus = False
235 235
236 236 def overridedirty(orig, repo, ignoreupdate=False):
237 237 try:
238 238 repo._repo.lfstatus = True
239 239 return orig(repo, ignoreupdate)
240 240 finally:
241 241 repo._repo.lfstatus = False
242 242
243 243 def overridelog(orig, ui, repo, *pats, **opts):
244 244 def overridematch(ctx, pats=[], opts={}, globbed=False,
245 245 default='relpath'):
246 246 """Matcher that merges root directory with .hglf, suitable for log.
247 247 It is still possible to match .hglf directly.
248 248 For any listed files run log on the standin too.
249 249 matchfn tries both the given filename and with .hglf stripped.
250 250 """
251 251 match = oldmatch(ctx, pats, opts, globbed, default)
252 252 m = copy.copy(match)
253 253 standins = [lfutil.standin(f) for f in m._files]
254 254 m._files.extend(standins)
255 255 m._fmap = set(m._files)
256 256 m._always = False
257 257 origmatchfn = m.matchfn
258 258 def lfmatchfn(f):
259 259 lf = lfutil.splitstandin(f)
260 260 if lf is not None and origmatchfn(lf):
261 261 return True
262 262 r = origmatchfn(f)
263 263 return r
264 264 m.matchfn = lfmatchfn
265 265 return m
266 266 oldmatch = installmatchfn(overridematch)
267 267 try:
268 268 repo.lfstatus = True
269 269 return orig(ui, repo, *pats, **opts)
270 270 finally:
271 271 repo.lfstatus = False
272 272 restorematchfn()
273 273
274 274 def overrideverify(orig, ui, repo, *pats, **opts):
275 275 large = opts.pop('large', False)
276 276 all = opts.pop('lfa', False)
277 277 contents = opts.pop('lfc', False)
278 278
279 279 result = orig(ui, repo, *pats, **opts)
280 280 if large or all or contents:
281 281 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
282 282 return result
283 283
284 284 def overridedebugstate(orig, ui, repo, *pats, **opts):
285 285 large = opts.pop('large', False)
286 286 if large:
287 287 lfcommands.debugdirstate(ui, repo)
288 288 else:
289 289 orig(ui, repo, *pats, **opts)
290 290
291 291 # Override needs to refresh standins so that update's normal merge
292 292 # will go through properly. Then the other update hook (overriding repo.update)
293 293 # will get the new files. Filemerge is also overridden so that the merge
294 294 # will merge standins correctly.
295 295 def overrideupdate(orig, ui, repo, *pats, **opts):
296 296 lfdirstate = lfutil.openlfdirstate(ui, repo)
297 297 s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
298 298 False, False)
299 299 (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
300 300
301 301 # Need to lock between the standins getting updated and their
302 302 # largefiles getting updated
303 303 wlock = repo.wlock()
304 304 try:
305 305 if opts['check']:
306 306 mod = len(modified) > 0
307 307 for lfile in unsure:
308 308 standin = lfutil.standin(lfile)
309 309 if repo['.'][standin].data().strip() != \
310 310 lfutil.hashfile(repo.wjoin(lfile)):
311 311 mod = True
312 312 else:
313 313 lfdirstate.normal(lfile)
314 314 lfdirstate.write()
315 315 if mod:
316 316 raise util.Abort(_('uncommitted local changes'))
317 317 # XXX handle removed differently
318 318 if not opts['clean']:
319 319 for lfile in unsure + modified + added:
320 320 lfutil.updatestandin(repo, lfutil.standin(lfile))
321 321 finally:
322 322 wlock.release()
323 323 return orig(ui, repo, *pats, **opts)
324 324
325 325 # Before starting the manifest merge, merge.updates will call
326 326 # _checkunknown to check if there are any files in the merged-in
327 327 # changeset that collide with unknown files in the working copy.
328 328 #
329 329 # The largefiles are seen as unknown, so this prevents us from merging
330 330 # in a file 'foo' if we already have a largefile with the same name.
331 331 #
332 332 # The overridden function filters the unknown files by removing any
333 333 # largefiles. This makes the merge proceed and we can then handle this
334 334 # case further in the overridden manifestmerge function below.
335 335 def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
336 336 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
337 337 return False
338 338 return origfn(repo, wctx, mctx, f)
339 339
340 340 # The manifest merge handles conflicts on the manifest level. We want
341 341 # to handle changes in largefile-ness of files at this level too.
342 342 #
343 343 # The strategy is to run the original manifestmerge and then process
344 344 # the action list it outputs. There are two cases we need to deal with:
345 345 #
346 346 # 1. Normal file in p1, largefile in p2. Here the largefile is
347 347 # detected via its standin file, which will enter the working copy
348 348 # with a "get" action. It is not "merge" since the standin is all
349 349 # Mercurial is concerned with at this level -- the link to the
350 350 # existing normal file is not relevant here.
351 351 #
352 352 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
353 353 # since the largefile will be present in the working copy and
354 354 # different from the normal file in p2. Mercurial therefore
355 355 # triggers a merge action.
356 356 #
357 357 # In both cases, we prompt the user and emit new actions to either
358 358 # remove the standin (if the normal file was kept) or to remove the
359 359 # normal file and get the standin (if the largefile was kept). The
360 360 # default prompt answer is to use the largefile version since it was
361 361 # presumably changed on purpose.
362 362 #
363 363 # Finally, the merge.applyupdates function will then take care of
364 364 # writing the files into the working copy and lfcommands.updatelfiles
365 365 # will update the largefiles.
366 366 def overridemanifestmerge(origfn, repo, p1, p2, pa, branchmerge, force,
367 367 partial, acceptremote=False):
368 368 overwrite = force and not branchmerge
369 369 actions = origfn(repo, p1, p2, pa, branchmerge, force, partial,
370 370 acceptremote)
371 371 processed = []
372 372
373 373 for action in actions:
374 374 if overwrite:
375 375 processed.append(action)
376 376 continue
377 377 f, m, args, msg = action
378 378
379 choices = (_('&Largefile'), _('&Normal file'))
380
381 379 splitstandin = lfutil.splitstandin(f)
382 380 if (m == "g" and splitstandin is not None and
383 381 splitstandin in p1 and f in p2):
384 382 # Case 1: normal file in the working copy, largefile in
385 383 # the second parent
386 384 lfile = splitstandin
387 385 standin = f
388 386 msg = _('%s has been turned into a largefile\n'
389 'use (l)argefile or keep as (n)ormal file?') % lfile
390 if repo.ui.promptchoice(msg, choices, 0) == 0:
387 'use (l)argefile or keep as (n)ormal file?'
388 '$$ &Largefile $$ &Normal file') % lfile
389 if repo.ui.promptchoice(msg, 0) == 0:
391 390 processed.append((lfile, "r", None, msg))
392 391 processed.append((standin, "g", (p2.flags(standin),), msg))
393 392 else:
394 393 processed.append((standin, "r", None, msg))
395 394 elif m == "g" and lfutil.standin(f) in p1 and f in p2:
396 395 # Case 2: largefile in the working copy, normal file in
397 396 # the second parent
398 397 standin = lfutil.standin(f)
399 398 lfile = f
400 399 msg = _('%s has been turned into a normal file\n'
401 'keep as (l)argefile or use (n)ormal file?') % lfile
402 if repo.ui.promptchoice(msg, choices, 0) == 0:
400 'keep as (l)argefile or use (n)ormal file?'
401 '$$ &Largefile $$ &Normal file') % lfile
402 if repo.ui.promptchoice(msg, 0) == 0:
403 403 processed.append((lfile, "r", None, msg))
404 404 else:
405 405 processed.append((standin, "r", None, msg))
406 406 processed.append((lfile, "g", (p2.flags(lfile),), msg))
407 407 else:
408 408 processed.append(action)
409 409
410 410 return processed
411 411
412 412 # Override filemerge to prompt the user about how they wish to merge
413 413 # largefiles. This will handle identical edits, and copy/rename +
414 414 # edit without prompting the user.
415 415 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca):
416 416 # Use better variable names here. Because this is a wrapper we cannot
417 417 # change the variable names in the function declaration.
418 418 fcdest, fcother, fcancestor = fcd, fco, fca
419 419 if not lfutil.isstandin(orig):
420 420 return origfn(repo, mynode, orig, fcdest, fcother, fcancestor)
421 421 else:
422 422 if not fcother.cmp(fcdest): # files identical?
423 423 return None
424 424
425 425 # backwards, use working dir parent as ancestor
426 426 if fcancestor == fcother:
427 427 fcancestor = fcdest.parents()[0]
428 428
429 429 if orig != fcother.path():
430 430 repo.ui.status(_('merging %s and %s to %s\n')
431 431 % (lfutil.splitstandin(orig),
432 432 lfutil.splitstandin(fcother.path()),
433 433 lfutil.splitstandin(fcdest.path())))
434 434 else:
435 435 repo.ui.status(_('merging %s\n')
436 436 % lfutil.splitstandin(fcdest.path()))
437 437
438 438 if fcancestor.path() != fcother.path() and fcother.data() == \
439 439 fcancestor.data():
440 440 return 0
441 441 if fcancestor.path() != fcdest.path() and fcdest.data() == \
442 442 fcancestor.data():
443 443 repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
444 444 return 0
445 445
446 446 if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
447 'keep (l)ocal or take (o)ther?') %
448 lfutil.splitstandin(orig),
449 (_('&Local'), _('&Other')), 0) == 0:
447 'keep (l)ocal or take (o)ther?'
448 '$$ &Local $$ &Other') %
449 lfutil.splitstandin(orig), 0) == 0:
450 450 return 0
451 451 else:
452 452 repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
453 453 return 0
454 454
455 455 # Copy first changes the matchers to match standins instead of
456 456 # largefiles. Then it overrides util.copyfile in that function it
457 457 # checks if the destination largefile already exists. It also keeps a
458 458 # list of copied files so that the largefiles can be copied and the
459 459 # dirstate updated.
460 460 def overridecopy(orig, ui, repo, pats, opts, rename=False):
461 461 # doesn't remove largefile on rename
462 462 if len(pats) < 2:
463 463 # this isn't legal, let the original function deal with it
464 464 return orig(ui, repo, pats, opts, rename)
465 465
466 466 def makestandin(relpath):
467 467 path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
468 468 return os.path.join(repo.wjoin(lfutil.standin(path)))
469 469
470 470 fullpats = scmutil.expandpats(pats)
471 471 dest = fullpats[-1]
472 472
473 473 if os.path.isdir(dest):
474 474 if not os.path.isdir(makestandin(dest)):
475 475 os.makedirs(makestandin(dest))
476 476 # This could copy both lfiles and normal files in one command,
477 477 # but we don't want to do that. First replace their matcher to
478 478 # only match normal files and run it, then replace it to just
479 479 # match largefiles and run it again.
480 480 nonormalfiles = False
481 481 nolfiles = False
482 482 try:
483 483 try:
484 484 installnormalfilesmatchfn(repo[None].manifest())
485 485 result = orig(ui, repo, pats, opts, rename)
486 486 except util.Abort, e:
487 487 if str(e) != _('no files to copy'):
488 488 raise e
489 489 else:
490 490 nonormalfiles = True
491 491 result = 0
492 492 finally:
493 493 restorematchfn()
494 494
495 495 # The first rename can cause our current working directory to be removed.
496 496 # In that case there is nothing left to copy/rename so just quit.
497 497 try:
498 498 repo.getcwd()
499 499 except OSError:
500 500 return result
501 501
502 502 try:
503 503 try:
504 504 # When we call orig below it creates the standins but we don't add
505 505 # them to the dir state until later so lock during that time.
506 506 wlock = repo.wlock()
507 507
508 508 manifest = repo[None].manifest()
509 509 oldmatch = None # for the closure
510 510 def overridematch(ctx, pats=[], opts={}, globbed=False,
511 511 default='relpath'):
512 512 newpats = []
513 513 # The patterns were previously mangled to add the standin
514 514 # directory; we need to remove that now
515 515 for pat in pats:
516 516 if match_.patkind(pat) is None and lfutil.shortname in pat:
517 517 newpats.append(pat.replace(lfutil.shortname, ''))
518 518 else:
519 519 newpats.append(pat)
520 520 match = oldmatch(ctx, newpats, opts, globbed, default)
521 521 m = copy.copy(match)
522 522 lfile = lambda f: lfutil.standin(f) in manifest
523 523 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
524 524 m._fmap = set(m._files)
525 525 m._always = False
526 526 origmatchfn = m.matchfn
527 527 m.matchfn = lambda f: (lfutil.isstandin(f) and
528 528 (f in manifest) and
529 529 origmatchfn(lfutil.splitstandin(f)) or
530 530 None)
531 531 return m
532 532 oldmatch = installmatchfn(overridematch)
533 533 listpats = []
534 534 for pat in pats:
535 535 if match_.patkind(pat) is not None:
536 536 listpats.append(pat)
537 537 else:
538 538 listpats.append(makestandin(pat))
539 539
540 540 try:
541 541 origcopyfile = util.copyfile
542 542 copiedfiles = []
543 543 def overridecopyfile(src, dest):
544 544 if (lfutil.shortname in src and
545 545 dest.startswith(repo.wjoin(lfutil.shortname))):
546 546 destlfile = dest.replace(lfutil.shortname, '')
547 547 if not opts['force'] and os.path.exists(destlfile):
548 548 raise IOError('',
549 549 _('destination largefile already exists'))
550 550 copiedfiles.append((src, dest))
551 551 origcopyfile(src, dest)
552 552
553 553 util.copyfile = overridecopyfile
554 554 result += orig(ui, repo, listpats, opts, rename)
555 555 finally:
556 556 util.copyfile = origcopyfile
557 557
558 558 lfdirstate = lfutil.openlfdirstate(ui, repo)
559 559 for (src, dest) in copiedfiles:
560 560 if (lfutil.shortname in src and
561 561 dest.startswith(repo.wjoin(lfutil.shortname))):
562 562 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
563 563 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
564 564 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
565 565 if not os.path.isdir(destlfiledir):
566 566 os.makedirs(destlfiledir)
567 567 if rename:
568 568 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
569 569 lfdirstate.remove(srclfile)
570 570 else:
571 571 util.copyfile(repo.wjoin(srclfile),
572 572 repo.wjoin(destlfile))
573 573
574 574 lfdirstate.add(destlfile)
575 575 lfdirstate.write()
576 576 except util.Abort, e:
577 577 if str(e) != _('no files to copy'):
578 578 raise e
579 579 else:
580 580 nolfiles = True
581 581 finally:
582 582 restorematchfn()
583 583 wlock.release()
584 584
585 585 if nolfiles and nonormalfiles:
586 586 raise util.Abort(_('no files to copy'))
587 587
588 588 return result
589 589
590 590 # When the user calls revert, we have to be careful to not revert any
591 591 # changes to other largefiles accidentally. This means we have to keep
592 592 # track of the largefiles that are being reverted so we only pull down
593 593 # the necessary largefiles.
594 594 #
595 595 # Standins are only updated (to match the hash of largefiles) before
596 596 # commits. Update the standins then run the original revert, changing
597 597 # the matcher to hit standins instead of largefiles. Based on the
598 598 # resulting standins update the largefiles. Then return the standins
599 599 # to their proper state
600 600 def overriderevert(orig, ui, repo, *pats, **opts):
601 601 # Because we put the standins in a bad state (by updating them)
602 602 # and then return them to a correct state we need to lock to
603 603 # prevent others from changing them in their incorrect state.
604 604 wlock = repo.wlock()
605 605 try:
606 606 lfdirstate = lfutil.openlfdirstate(ui, repo)
607 607 (modified, added, removed, missing, unknown, ignored, clean) = \
608 608 lfutil.lfdirstatestatus(lfdirstate, repo, repo['.'].rev())
609 609 lfdirstate.write()
610 610 for lfile in modified:
611 611 lfutil.updatestandin(repo, lfutil.standin(lfile))
612 612 for lfile in missing:
613 613 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
614 614 os.unlink(repo.wjoin(lfutil.standin(lfile)))
615 615
616 616 try:
617 617 ctx = scmutil.revsingle(repo, opts.get('rev'))
618 618 oldmatch = None # for the closure
619 619 def overridematch(ctx, pats=[], opts={}, globbed=False,
620 620 default='relpath'):
621 621 match = oldmatch(ctx, pats, opts, globbed, default)
622 622 m = copy.copy(match)
623 623 def tostandin(f):
624 624 if lfutil.standin(f) in ctx:
625 625 return lfutil.standin(f)
626 626 elif lfutil.standin(f) in repo[None]:
627 627 return None
628 628 return f
629 629 m._files = [tostandin(f) for f in m._files]
630 630 m._files = [f for f in m._files if f is not None]
631 631 m._fmap = set(m._files)
632 632 m._always = False
633 633 origmatchfn = m.matchfn
634 634 def matchfn(f):
635 635 if lfutil.isstandin(f):
636 636 # We need to keep track of what largefiles are being
637 637 # matched so we know which ones to update later --
638 638 # otherwise we accidentally revert changes to other
639 639 # largefiles. This is repo-specific, so duckpunch the
640 640 # repo object to keep the list of largefiles for us
641 641 # later.
642 642 if origmatchfn(lfutil.splitstandin(f)) and \
643 643 (f in repo[None] or f in ctx):
644 644 lfileslist = getattr(repo, '_lfilestoupdate', [])
645 645 lfileslist.append(lfutil.splitstandin(f))
646 646 repo._lfilestoupdate = lfileslist
647 647 return True
648 648 else:
649 649 return False
650 650 return origmatchfn(f)
651 651 m.matchfn = matchfn
652 652 return m
653 653 oldmatch = installmatchfn(overridematch)
654 654 scmutil.match
655 655 matches = overridematch(repo[None], pats, opts)
656 656 orig(ui, repo, *pats, **opts)
657 657 finally:
658 658 restorematchfn()
659 659 lfileslist = getattr(repo, '_lfilestoupdate', [])
660 660 lfcommands.updatelfiles(ui, repo, filelist=lfileslist,
661 661 printmessage=False)
662 662
663 663 # empty out the largefiles list so we start fresh next time
664 664 repo._lfilestoupdate = []
665 665 for lfile in modified:
666 666 if lfile in lfileslist:
667 667 if os.path.exists(repo.wjoin(lfutil.standin(lfile))) and lfile\
668 668 in repo['.']:
669 669 lfutil.writestandin(repo, lfutil.standin(lfile),
670 670 repo['.'][lfile].data().strip(),
671 671 'x' in repo['.'][lfile].flags())
672 672 lfdirstate = lfutil.openlfdirstate(ui, repo)
673 673 for lfile in added:
674 674 standin = lfutil.standin(lfile)
675 675 if standin not in ctx and (standin in matches or opts.get('all')):
676 676 if lfile in lfdirstate:
677 677 lfdirstate.drop(lfile)
678 678 util.unlinkpath(repo.wjoin(standin))
679 679 lfdirstate.write()
680 680 finally:
681 681 wlock.release()
682 682
683 683 def hgupdaterepo(orig, repo, node, overwrite):
684 684 if not overwrite:
685 685 # Only call updatelfiles on the standins that have changed to save time
686 686 oldstandins = lfutil.getstandinsstate(repo)
687 687
688 688 result = orig(repo, node, overwrite)
689 689
690 690 filelist = None
691 691 if not overwrite:
692 692 newstandins = lfutil.getstandinsstate(repo)
693 693 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
694 694 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist)
695 695 return result
696 696
697 697 def hgmerge(orig, repo, node, force=None, remind=True):
698 698 result = orig(repo, node, force, remind)
699 699 lfcommands.updatelfiles(repo.ui, repo)
700 700 return result
701 701
702 702 # When we rebase a repository with remotely changed largefiles, we need to
703 703 # take some extra care so that the largefiles are correctly updated in the
704 704 # working copy
705 705 def overridepull(orig, ui, repo, source=None, **opts):
706 706 revsprepull = len(repo)
707 707 if not source:
708 708 source = 'default'
709 709 repo.lfpullsource = source
710 710 if opts.get('rebase', False):
711 711 repo._isrebasing = True
712 712 try:
713 713 if opts.get('update'):
714 714 del opts['update']
715 715 ui.debug('--update and --rebase are not compatible, ignoring '
716 716 'the update flag\n')
717 717 del opts['rebase']
718 718 cmdutil.bailifchanged(repo)
719 719 origpostincoming = commands.postincoming
720 720 def _dummy(*args, **kwargs):
721 721 pass
722 722 commands.postincoming = _dummy
723 723 try:
724 724 result = commands.pull(ui, repo, source, **opts)
725 725 finally:
726 726 commands.postincoming = origpostincoming
727 727 revspostpull = len(repo)
728 728 if revspostpull > revsprepull:
729 729 result = result or rebase.rebase(ui, repo)
730 730 finally:
731 731 repo._isrebasing = False
732 732 else:
733 733 result = orig(ui, repo, source, **opts)
734 734 revspostpull = len(repo)
735 735 lfrevs = opts.get('lfrev', [])
736 736 if opts.get('all_largefiles'):
737 737 lfrevs.append('pulled()')
738 738 if lfrevs and revspostpull > revsprepull:
739 739 numcached = 0
740 740 repo.firstpulled = revsprepull # for pulled() revset expression
741 741 try:
742 742 for rev in scmutil.revrange(repo, lfrevs):
743 743 ui.note(_('pulling largefiles for revision %s\n') % rev)
744 744 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
745 745 numcached += len(cached)
746 746 finally:
747 747 del repo.firstpulled
748 748 ui.status(_("%d largefiles cached\n") % numcached)
749 749 return result
750 750
751 751 def pulledrevsetsymbol(repo, subset, x):
752 752 """``pulled()``
753 753 Changesets that have just been pulled.
754 754
755 755 Only available with largefiles from pull --lfrev expressions.
756 756
757 757 .. container:: verbose
758 758
759 759 Some examples:
760 760
761 761 - pull largefiles for all new changesets::
762 762
763 763 hg pull --lfrev "pulled()"
764 764
765 765 - pull largefiles for all new branch heads::
766 766
767 767 hg pull --lfrev "head(pulled()) and not closed()"
768 768
769 769 """
770 770
771 771 try:
772 772 firstpulled = repo.firstpulled
773 773 except AttributeError:
774 774 raise util.Abort(_("pulled() only available in --lfrev"))
775 775 return [r for r in subset if r >= firstpulled]
776 776
777 777 def overrideclone(orig, ui, source, dest=None, **opts):
778 778 d = dest
779 779 if d is None:
780 780 d = hg.defaultdest(source)
781 781 if opts.get('all_largefiles') and not hg.islocal(d):
782 782 raise util.Abort(_(
783 783 '--all-largefiles is incompatible with non-local destination %s' %
784 784 d))
785 785
786 786 return orig(ui, source, dest, **opts)
787 787
788 788 def hgclone(orig, ui, opts, *args, **kwargs):
789 789 result = orig(ui, opts, *args, **kwargs)
790 790
791 791 if result is not None:
792 792 sourcerepo, destrepo = result
793 793 repo = destrepo.local()
794 794
795 795 # Caching is implicitly limited to 'rev' option, since the dest repo was
796 796 # truncated at that point. The user may expect a download count with
797 797 # this option, so attempt it whether or not this is a largefile repo.
798 798 if opts.get('all_largefiles'):
799 799 success, missing = lfcommands.downloadlfiles(ui, repo, None)
800 800
801 801 if missing != 0:
802 802 return None
803 803
804 804 return result
805 805
806 806 def overriderebase(orig, ui, repo, **opts):
807 807 repo._isrebasing = True
808 808 try:
809 809 return orig(ui, repo, **opts)
810 810 finally:
811 811 repo._isrebasing = False
812 812
813 813 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
814 814 prefix=None, mtime=None, subrepos=None):
815 815 # No need to lock because we are only reading history and
816 816 # largefile caches, neither of which are modified.
817 817 lfcommands.cachelfiles(repo.ui, repo, node)
818 818
819 819 if kind not in archival.archivers:
820 820 raise util.Abort(_("unknown archive type '%s'") % kind)
821 821
822 822 ctx = repo[node]
823 823
824 824 if kind == 'files':
825 825 if prefix:
826 826 raise util.Abort(
827 827 _('cannot give prefix when archiving to files'))
828 828 else:
829 829 prefix = archival.tidyprefix(dest, kind, prefix)
830 830
831 831 def write(name, mode, islink, getdata):
832 832 if matchfn and not matchfn(name):
833 833 return
834 834 data = getdata()
835 835 if decode:
836 836 data = repo.wwritedata(name, data)
837 837 archiver.addfile(prefix + name, mode, islink, data)
838 838
839 839 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
840 840
841 841 if repo.ui.configbool("ui", "archivemeta", True):
842 842 def metadata():
843 843 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
844 844 hex(repo.changelog.node(0)), hex(node), ctx.branch())
845 845
846 846 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
847 847 if repo.tagtype(t) == 'global')
848 848 if not tags:
849 849 repo.ui.pushbuffer()
850 850 opts = {'template': '{latesttag}\n{latesttagdistance}',
851 851 'style': '', 'patch': None, 'git': None}
852 852 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
853 853 ltags, dist = repo.ui.popbuffer().split('\n')
854 854 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
855 855 tags += 'latesttagdistance: %s\n' % dist
856 856
857 857 return base + tags
858 858
859 859 write('.hg_archival.txt', 0644, False, metadata)
860 860
861 861 for f in ctx:
862 862 ff = ctx.flags(f)
863 863 getdata = ctx[f].data
864 864 if lfutil.isstandin(f):
865 865 path = lfutil.findfile(repo, getdata().strip())
866 866 if path is None:
867 867 raise util.Abort(
868 868 _('largefile %s not found in repo store or system cache')
869 869 % lfutil.splitstandin(f))
870 870 f = lfutil.splitstandin(f)
871 871
872 872 def getdatafn():
873 873 fd = None
874 874 try:
875 875 fd = open(path, 'rb')
876 876 return fd.read()
877 877 finally:
878 878 if fd:
879 879 fd.close()
880 880
881 881 getdata = getdatafn
882 882 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
883 883
884 884 if subrepos:
885 885 for subpath in sorted(ctx.substate):
886 886 sub = ctx.sub(subpath)
887 887 submatch = match_.narrowmatcher(subpath, matchfn)
888 888 sub.archive(repo.ui, archiver, prefix, submatch)
889 889
890 890 archiver.done()
891 891
892 892 def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
893 893 repo._get(repo._state + ('hg',))
894 894 rev = repo._state[1]
895 895 ctx = repo._repo[rev]
896 896
897 897 lfcommands.cachelfiles(ui, repo._repo, ctx.node())
898 898
899 899 def write(name, mode, islink, getdata):
900 900 # At this point, the standin has been replaced with the largefile name,
901 901 # so the normal matcher works here without the lfutil variants.
902 902 if match and not match(f):
903 903 return
904 904 data = getdata()
905 905
906 906 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
907 907
908 908 for f in ctx:
909 909 ff = ctx.flags(f)
910 910 getdata = ctx[f].data
911 911 if lfutil.isstandin(f):
912 912 path = lfutil.findfile(repo._repo, getdata().strip())
913 913 if path is None:
914 914 raise util.Abort(
915 915 _('largefile %s not found in repo store or system cache')
916 916 % lfutil.splitstandin(f))
917 917 f = lfutil.splitstandin(f)
918 918
919 919 def getdatafn():
920 920 fd = None
921 921 try:
922 922 fd = open(os.path.join(prefix, path), 'rb')
923 923 return fd.read()
924 924 finally:
925 925 if fd:
926 926 fd.close()
927 927
928 928 getdata = getdatafn
929 929
930 930 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
931 931
932 932 for subpath in sorted(ctx.substate):
933 933 sub = ctx.sub(subpath)
934 934 submatch = match_.narrowmatcher(subpath, match)
935 935 sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
936 936 submatch)
937 937
938 938 # If a largefile is modified, the change is not reflected in its
939 939 # standin until a commit. cmdutil.bailifchanged() raises an exception
940 940 # if the repo has uncommitted changes. Wrap it to also check if
941 941 # largefiles were changed. This is used by bisect and backout.
942 942 def overridebailifchanged(orig, repo):
943 943 orig(repo)
944 944 repo.lfstatus = True
945 945 modified, added, removed, deleted = repo.status()[:4]
946 946 repo.lfstatus = False
947 947 if modified or added or removed or deleted:
948 948 raise util.Abort(_('outstanding uncommitted changes'))
949 949
950 950 # Fetch doesn't use cmdutil.bailifchanged so override it to add the check
951 951 def overridefetch(orig, ui, repo, *pats, **opts):
952 952 repo.lfstatus = True
953 953 modified, added, removed, deleted = repo.status()[:4]
954 954 repo.lfstatus = False
955 955 if modified or added or removed or deleted:
956 956 raise util.Abort(_('outstanding uncommitted changes'))
957 957 return orig(ui, repo, *pats, **opts)
958 958
959 959 def overrideforget(orig, ui, repo, *pats, **opts):
960 960 installnormalfilesmatchfn(repo[None].manifest())
961 961 result = orig(ui, repo, *pats, **opts)
962 962 restorematchfn()
963 963 m = scmutil.match(repo[None], pats, opts)
964 964
965 965 try:
966 966 repo.lfstatus = True
967 967 s = repo.status(match=m, clean=True)
968 968 finally:
969 969 repo.lfstatus = False
970 970 forget = sorted(s[0] + s[1] + s[3] + s[6])
971 971 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
972 972
973 973 for f in forget:
974 974 if lfutil.standin(f) not in repo.dirstate and not \
975 975 os.path.isdir(m.rel(lfutil.standin(f))):
976 976 ui.warn(_('not removing %s: file is already untracked\n')
977 977 % m.rel(f))
978 978 result = 1
979 979
980 980 for f in forget:
981 981 if ui.verbose or not m.exact(f):
982 982 ui.status(_('removing %s\n') % m.rel(f))
983 983
984 984 # Need to lock because standin files are deleted then removed from the
985 985 # repository and we could race in-between.
986 986 wlock = repo.wlock()
987 987 try:
988 988 lfdirstate = lfutil.openlfdirstate(ui, repo)
989 989 for f in forget:
990 990 if lfdirstate[f] == 'a':
991 991 lfdirstate.drop(f)
992 992 else:
993 993 lfdirstate.remove(f)
994 994 lfdirstate.write()
995 995 standins = [lfutil.standin(f) for f in forget]
996 996 for f in standins:
997 997 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
998 998 repo[None].forget(standins)
999 999 finally:
1000 1000 wlock.release()
1001 1001
1002 1002 return result
1003 1003
1004 1004 def getoutgoinglfiles(ui, repo, dest=None, **opts):
1005 1005 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1006 1006 dest, branches = hg.parseurl(dest, opts.get('branch'))
1007 1007 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
1008 1008 if revs:
1009 1009 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
1010 1010
1011 1011 try:
1012 1012 remote = hg.peer(repo, opts, dest)
1013 1013 except error.RepoError:
1014 1014 return None
1015 1015 outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=False)
1016 1016 if not outgoing.missing:
1017 1017 return outgoing.missing
1018 1018 o = repo.changelog.nodesbetween(outgoing.missing, revs)[0]
1019 1019 if opts.get('newest_first'):
1020 1020 o.reverse()
1021 1021
1022 1022 toupload = set()
1023 1023 for n in o:
1024 1024 parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
1025 1025 ctx = repo[n]
1026 1026 files = set(ctx.files())
1027 1027 if len(parents) == 2:
1028 1028 mc = ctx.manifest()
1029 1029 mp1 = ctx.parents()[0].manifest()
1030 1030 mp2 = ctx.parents()[1].manifest()
1031 1031 for f in mp1:
1032 1032 if f not in mc:
1033 1033 files.add(f)
1034 1034 for f in mp2:
1035 1035 if f not in mc:
1036 1036 files.add(f)
1037 1037 for f in mc:
1038 1038 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
1039 1039 files.add(f)
1040 1040 toupload = toupload.union(
1041 1041 set([f for f in files if lfutil.isstandin(f) and f in ctx]))
1042 1042 return sorted(toupload)
1043 1043
1044 1044 def overrideoutgoing(orig, ui, repo, dest=None, **opts):
1045 1045 result = orig(ui, repo, dest, **opts)
1046 1046
1047 1047 if opts.pop('large', None):
1048 1048 toupload = getoutgoinglfiles(ui, repo, dest, **opts)
1049 1049 if toupload is None:
1050 1050 ui.status(_('largefiles: No remote repo\n'))
1051 1051 elif not toupload:
1052 1052 ui.status(_('largefiles: no files to upload\n'))
1053 1053 else:
1054 1054 ui.status(_('largefiles to upload:\n'))
1055 1055 for file in toupload:
1056 1056 ui.status(lfutil.splitstandin(file) + '\n')
1057 1057 ui.status('\n')
1058 1058
1059 1059 return result
1060 1060
1061 1061 def overridesummary(orig, ui, repo, *pats, **opts):
1062 1062 try:
1063 1063 repo.lfstatus = True
1064 1064 orig(ui, repo, *pats, **opts)
1065 1065 finally:
1066 1066 repo.lfstatus = False
1067 1067
1068 1068 if opts.pop('large', None):
1069 1069 toupload = getoutgoinglfiles(ui, repo, None, **opts)
1070 1070 if toupload is None:
1071 1071 # i18n: column positioning for "hg summary"
1072 1072 ui.status(_('largefiles: (no remote repo)\n'))
1073 1073 elif not toupload:
1074 1074 # i18n: column positioning for "hg summary"
1075 1075 ui.status(_('largefiles: (no files to upload)\n'))
1076 1076 else:
1077 1077 # i18n: column positioning for "hg summary"
1078 1078 ui.status(_('largefiles: %d to upload\n') % len(toupload))
1079 1079
1080 1080 def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
1081 1081 similarity=None):
1082 1082 if not lfutil.islfilesrepo(repo):
1083 1083 return orig(repo, pats, opts, dry_run, similarity)
1084 1084 # Get the list of missing largefiles so we can remove them
1085 1085 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1086 1086 s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
1087 1087 False, False)
1088 1088 (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
1089 1089
1090 1090 # Call into the normal remove code, but we want the removal of the standin
1091 1091 # to be handled by the original addremove. Monkey patching here makes sure
1092 1092 # we don't remove the standin in the largefiles code, preventing a very
1093 1093 # confused state later.
1094 1094 if missing:
1095 1095 m = [repo.wjoin(f) for f in missing]
1096 1096 repo._isaddremove = True
1097 1097 removelargefiles(repo.ui, repo, *m, **opts)
1098 1098 repo._isaddremove = False
1099 1099 # Call into the normal add code, and any files that *should* be added as
1100 1100 # largefiles will be
1101 1101 addlargefiles(repo.ui, repo, *pats, **opts)
1102 1102 # Now that we've handled largefiles, hand off to the original addremove
1103 1103 # function to take care of the rest. Make sure it doesn't do anything with
1104 1104 # largefiles by installing a matcher that will ignore them.
1105 1105 installnormalfilesmatchfn(repo[None].manifest())
1106 1106 result = orig(repo, pats, opts, dry_run, similarity)
1107 1107 restorematchfn()
1108 1108 return result
1109 1109
1110 1110 # Calling purge with --all will cause the largefiles to be deleted.
1111 1111 # Override repo.status to prevent this from happening.
1112 1112 def overridepurge(orig, ui, repo, *dirs, **opts):
1113 1113 # XXX large file status is buggy when used on repo proxy.
1114 1114 # XXX this needs to be investigated.
1115 1115 repo = repo.unfiltered()
1116 1116 oldstatus = repo.status
1117 1117 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1118 1118 clean=False, unknown=False, listsubrepos=False):
1119 1119 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1120 1120 listsubrepos)
1121 1121 lfdirstate = lfutil.openlfdirstate(ui, repo)
1122 1122 modified, added, removed, deleted, unknown, ignored, clean = r
1123 1123 unknown = [f for f in unknown if lfdirstate[f] == '?']
1124 1124 ignored = [f for f in ignored if lfdirstate[f] == '?']
1125 1125 return modified, added, removed, deleted, unknown, ignored, clean
1126 1126 repo.status = overridestatus
1127 1127 orig(ui, repo, *dirs, **opts)
1128 1128 repo.status = oldstatus
1129 1129
1130 1130 def overriderollback(orig, ui, repo, **opts):
1131 1131 result = orig(ui, repo, **opts)
1132 1132 merge.update(repo, node=None, branchmerge=False, force=True,
1133 1133 partial=lfutil.isstandin)
1134 1134 wlock = repo.wlock()
1135 1135 try:
1136 1136 lfdirstate = lfutil.openlfdirstate(ui, repo)
1137 1137 lfiles = lfutil.listlfiles(repo)
1138 1138 oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
1139 1139 for file in lfiles:
1140 1140 if file in oldlfiles:
1141 1141 lfdirstate.normallookup(file)
1142 1142 else:
1143 1143 lfdirstate.add(file)
1144 1144 lfdirstate.write()
1145 1145 finally:
1146 1146 wlock.release()
1147 1147 return result
1148 1148
1149 1149 def overridetransplant(orig, ui, repo, *revs, **opts):
1150 1150 try:
1151 1151 oldstandins = lfutil.getstandinsstate(repo)
1152 1152 repo._istransplanting = True
1153 1153 result = orig(ui, repo, *revs, **opts)
1154 1154 newstandins = lfutil.getstandinsstate(repo)
1155 1155 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1156 1156 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1157 1157 printmessage=True)
1158 1158 finally:
1159 1159 repo._istransplanting = False
1160 1160 return result
1161 1161
1162 1162 def overridecat(orig, ui, repo, file1, *pats, **opts):
1163 1163 ctx = scmutil.revsingle(repo, opts.get('rev'))
1164 1164 err = 1
1165 1165 notbad = set()
1166 1166 m = scmutil.match(ctx, (file1,) + pats, opts)
1167 1167 origmatchfn = m.matchfn
1168 1168 def lfmatchfn(f):
1169 1169 lf = lfutil.splitstandin(f)
1170 1170 if lf is None:
1171 1171 return origmatchfn(f)
1172 1172 notbad.add(lf)
1173 1173 return origmatchfn(lf)
1174 1174 m.matchfn = lfmatchfn
1175 1175 origbadfn = m.bad
1176 1176 def lfbadfn(f, msg):
1177 1177 if not f in notbad:
1178 1178 return origbadfn(f, msg)
1179 1179 m.bad = lfbadfn
1180 1180 for f in ctx.walk(m):
1181 1181 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1182 1182 pathname=f)
1183 1183 lf = lfutil.splitstandin(f)
1184 1184 if lf is None:
1185 1185 # duplicating unreachable code from commands.cat
1186 1186 data = ctx[f].data()
1187 1187 if opts.get('decode'):
1188 1188 data = repo.wwritedata(f, data)
1189 1189 fp.write(data)
1190 1190 else:
1191 1191 hash = lfutil.readstandin(repo, lf, ctx.rev())
1192 1192 if not lfutil.inusercache(repo.ui, hash):
1193 1193 store = basestore._openstore(repo)
1194 1194 success, missing = store.get([(lf, hash)])
1195 1195 if len(success) != 1:
1196 1196 raise util.Abort(
1197 1197 _('largefile %s is not in cache and could not be '
1198 1198 'downloaded') % lf)
1199 1199 path = lfutil.usercachepath(repo.ui, hash)
1200 1200 fpin = open(path, "rb")
1201 1201 for chunk in util.filechunkiter(fpin, 128 * 1024):
1202 1202 fp.write(chunk)
1203 1203 fpin.close()
1204 1204 fp.close()
1205 1205 err = 0
1206 1206 return err
1207 1207
1208 1208 def mercurialsinkbefore(orig, sink):
1209 1209 sink.repo._isconverting = True
1210 1210 orig(sink)
1211 1211
1212 1212 def mercurialsinkafter(orig, sink):
1213 1213 sink.repo._isconverting = False
1214 1214 orig(sink)
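On the ui side (not part of this diff) the combined string has to be split back into the question and its '&'-marked choices before prompting. A minimal, self-contained sketch of that parsing, assuming only the '$$' convention visible in the hunks above rather than the actual mercurial.ui implementation:

    def splitprompt(prompt):
        '''Split a '$$'-joined prompt into (message, choices, responses).

        Each choice carries a '&' right before its one-letter response,
        e.g. '&Largefile' answers to 'l'.  Illustrative only.
        '''
        parts = [p.strip() for p in prompt.split('$$')]
        msg, choices = parts[0], parts[1:]
        responses = [c[c.index('&') + 1].lower() for c in choices]
        return msg, choices, responses

    # splitprompt('use (l)argefile or keep as (n)ormal file?'
    #             '$$ &Largefile $$ &Normal file')
    # -> ('use (l)argefile or keep as (n)ormal file?',
    #     ['&Largefile', '&Normal file'],
    #     ['l', 'n'])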
@@ -1,561 +1,561 b''
1 1 # patchbomb.py - sending Mercurial changesets as patch emails
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to send changesets as (a series of) patch emails
9 9
10 10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 11 describes the series as a whole.
12 12
13 13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 14 first line of the changeset description as the subject text. The
15 15 message contains two or three body parts:
16 16
17 17 - The changeset description.
18 18 - [Optional] The result of running diffstat on the patch.
19 19 - The patch itself, as generated by :hg:`export`.
20 20
21 21 Each message refers to the first in the series using the In-Reply-To
22 22 and References headers, so they will show up as a sequence in threaded
23 23 mail and news readers, and in mail archives.
24 24
25 25 To configure other defaults, add a section like this to your
26 26 configuration file::
27 27
28 28 [email]
29 29 from = My Name <my@email>
30 30 to = recipient1, recipient2, ...
31 31 cc = cc1, cc2, ...
32 32 bcc = bcc1, bcc2, ...
33 33 reply-to = address1, address2, ...
34 34
35 35 Use ``[patchbomb]`` as configuration section name if you need to
36 36 override global ``[email]`` address settings.
37 37
38 38 Then you can use the :hg:`email` command to mail a series of
39 39 changesets as a patchbomb.
40 40
41 41 You can also either configure the method option in the email section
42 42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 43 that the patchbomb extension can automatically send patchbombs
44 44 directly from the commandline. See the [email] and [smtp] sections in
45 45 hgrc(5) for details.
46 46 '''
47 47
48 48 import os, errno, socket, tempfile, cStringIO
49 49 import email.MIMEMultipart, email.MIMEBase
50 50 import email.Utils, email.Encoders, email.Generator
51 51 from mercurial import cmdutil, commands, hg, mail, patch, util
52 52 from mercurial import scmutil
53 53 from mercurial.i18n import _
54 54 from mercurial.node import bin
55 55
56 56 cmdtable = {}
57 57 command = cmdutil.command(cmdtable)
58 58 testedwith = 'internal'
59 59
60 60 def prompt(ui, prompt, default=None, rest=':'):
61 61 if default:
62 62 prompt += ' [%s]' % default
63 63 return ui.prompt(prompt + rest, default)
64 64
65 65 def introwanted(opts, number):
66 66 '''is an introductory message apparently wanted?'''
67 67 return number > 1 or opts.get('intro') or opts.get('desc')
68 68
69 69 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
70 70 patchname=None):
71 71
72 72 desc = []
73 73 node = None
74 74 body = ''
75 75
76 76 for line in patchlines:
77 77 if line.startswith('#'):
78 78 if line.startswith('# Node ID'):
79 79 node = line.split()[-1]
80 80 continue
81 81 if line.startswith('diff -r') or line.startswith('diff --git'):
82 82 break
83 83 desc.append(line)
84 84
85 85 if not patchname and not node:
86 86 raise ValueError
87 87
88 88 if opts.get('attach') and not opts.get('body'):
89 89 body = ('\n'.join(desc[1:]).strip() or
90 90 'Patch subject is complete summary.')
91 91 body += '\n\n\n'
92 92
93 93 if opts.get('plain'):
94 94 while patchlines and patchlines[0].startswith('# '):
95 95 patchlines.pop(0)
96 96 if patchlines:
97 97 patchlines.pop(0)
98 98 while patchlines and not patchlines[0].strip():
99 99 patchlines.pop(0)
100 100
101 101 ds = patch.diffstat(patchlines, git=opts.get('git'))
102 102 if opts.get('diffstat'):
103 103 body += ds + '\n\n'
104 104
105 105 addattachment = opts.get('attach') or opts.get('inline')
106 106 if not addattachment or opts.get('body'):
107 107 body += '\n'.join(patchlines)
108 108
109 109 if addattachment:
110 110 msg = email.MIMEMultipart.MIMEMultipart()
111 111 if body:
112 112 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
113 113 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
114 114 opts.get('test'))
115 115 binnode = bin(node)
116 116 # if node is mq patch, it will have the patch file's name as a tag
117 117 if not patchname:
118 118 patchtags = [t for t in repo.nodetags(binnode)
119 119 if t.endswith('.patch') or t.endswith('.diff')]
120 120 if patchtags:
121 121 patchname = patchtags[0]
122 122 elif total > 1:
123 123 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
124 124 binnode, seqno=idx,
125 125 total=total)
126 126 else:
127 127 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
128 128 disposition = 'inline'
129 129 if opts.get('attach'):
130 130 disposition = 'attachment'
131 131 p['Content-Disposition'] = disposition + '; filename=' + patchname
132 132 msg.attach(p)
133 133 else:
134 134 msg = mail.mimetextpatch(body, display=opts.get('test'))
135 135
136 136 flag = ' '.join(opts.get('flag'))
137 137 if flag:
138 138 flag = ' ' + flag
139 139
140 140 subj = desc[0].strip().rstrip('. ')
141 141 if not numbered:
142 142 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
143 143 else:
144 144 tlen = len(str(total))
145 145 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
146 146 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
147 147 msg['X-Mercurial-Node'] = node
148 148 return msg, subj, ds
149 149
150 150 emailopts = [
151 151 ('', 'body', None, _('send patches as inline message text (default)')),
152 152 ('a', 'attach', None, _('send patches as attachments')),
153 153 ('i', 'inline', None, _('send patches as inline attachments')),
154 154 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
155 155 ('c', 'cc', [], _('email addresses of copy recipients')),
156 156 ('', 'confirm', None, _('ask for confirmation before sending')),
157 157 ('d', 'diffstat', None, _('add diffstat output to messages')),
158 158 ('', 'date', '', _('use the given date as the sending date')),
159 159 ('', 'desc', '', _('use the given file as the series description')),
160 160 ('f', 'from', '', _('email address of sender')),
161 161 ('n', 'test', None, _('print messages that would be sent')),
162 162 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
163 163 ('', 'reply-to', [], _('email addresses replies should be sent to')),
164 164 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
165 165 ('', 'in-reply-to', '', _('message identifier to reply to')),
166 166 ('', 'flag', [], _('flags to add in subject prefixes')),
167 167 ('t', 'to', [], _('email addresses of recipients'))]
168 168
169 169 @command('email',
170 170 [('g', 'git', None, _('use git extended diff format')),
171 171 ('', 'plain', None, _('omit hg patch header')),
172 172 ('o', 'outgoing', None,
173 173 _('send changes not found in the target repository')),
174 174 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
175 175 ('', 'bundlename', 'bundle',
176 176 _('name of the bundle attachment file'), _('NAME')),
177 177 ('r', 'rev', [], _('a revision to send'), _('REV')),
178 178 ('', 'force', None, _('run even when remote repository is unrelated '
179 179 '(with -b/--bundle)')),
180 180 ('', 'base', [], _('a base changeset to specify instead of a destination '
181 181 '(with -b/--bundle)'), _('REV')),
182 182 ('', 'intro', None, _('send an introduction email for a single patch')),
183 183 ] + emailopts + commands.remoteopts,
184 184 _('hg email [OPTION]... [DEST]...'))
185 185 def patchbomb(ui, repo, *revs, **opts):
186 186 '''send changesets by email
187 187
188 188 By default, diffs are sent in the format generated by
189 189 :hg:`export`, one per message. The series starts with a "[PATCH 0
190 190 of N]" introduction, which describes the series as a whole.
191 191
192 192 Each patch email has a Subject line of "[PATCH M of N] ...", using
193 193 the first line of the changeset description as the subject text.
194 194 The message contains two or three parts. First, the changeset
195 195 description.
196 196
197 197 With the -d/--diffstat option, if the diffstat program is
198 198 installed, the result of running diffstat on the patch is inserted.
199 199
200 200 Finally, the patch itself, as generated by :hg:`export`.
201 201
202 202 With the -d/--diffstat or --confirm options, you will be presented
203 203 with a final summary of all messages and asked for confirmation before
204 204 the messages are sent.
205 205
206 206 By default the patch is included as text in the email body for
207 207 easy reviewing. Using the -a/--attach option will instead create
208 208 an attachment for the patch. With -i/--inline an inline attachment
209 209 will be created. You can include a patch both as text in the email
210 210 body and as a regular or an inline attachment by combining the
211 211 -a/--attach or -i/--inline with the --body option.
212 212
213 213 With -o/--outgoing, emails will be generated for patches not found
214 214 in the destination repository (or only those which are ancestors
215 215 of the specified revisions if any are provided)
216 216
217 217 With -b/--bundle, changesets are selected as for --outgoing, but a
218 218 single email containing a binary Mercurial bundle as an attachment
219 219 will be sent.
220 220
221 221 With -m/--mbox, instead of previewing each patchbomb message in a
222 222 pager or sending the messages directly, it will create a UNIX
223 223 mailbox file with the patch emails. This mailbox file can be
224 224 previewed with any mail user agent which supports UNIX mbox
225 225 files.
226 226
227 227 With -n/--test, all steps will run, but mail will not be sent.
228 228 You will be prompted for an email recipient address, a subject and
229 229 an introductory message describing the patches of your patchbomb.
230 230 Then when all is done, patchbomb messages are displayed. If the
231 231 PAGER environment variable is set, your pager will be fired up once
232 232 for each patchbomb message, so you can verify everything is alright.
233 233
234 234 In case email sending fails, you will find a backup of your series
235 235 introductory message in ``.hg/last-email.txt``.
236 236
237 237 Examples::
238 238
239 239 hg email -r 3000 # send patch 3000 only
240 240 hg email -r 3000 -r 3001 # send patches 3000 and 3001
241 241 hg email -r 3000:3005 # send patches 3000 through 3005
242 242 hg email 3000 # send patch 3000 (deprecated)
243 243
244 244 hg email -o # send all patches not in default
245 245 hg email -o DEST # send all patches not in DEST
246 246 hg email -o -r 3000 # send all ancestors of 3000 not in default
247 247 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
248 248
249 249 hg email -b # send bundle of all patches not in default
250 250 hg email -b DEST # send bundle of all patches not in DEST
251 251 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
252 252 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
253 253
254 254 hg email -o -m mbox && # generate an mbox file...
255 255 mutt -R -f mbox # ... and view it with mutt
256 256 hg email -o -m mbox && # generate an mbox file ...
257 257 formail -s sendmail \\ # ... and use formail to send from the mbox
258 258 -bm -t < mbox # ... using sendmail
259 259
260 260 Before using this command, you will need to enable email in your
261 261 hgrc. See the [email] section in hgrc(5) for details.
262 262 '''
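# The docstring above defers to the [email] section of hgrc(5); as a rough,
# illustrative sketch only (key names as commonly documented for this era of
# Mercurial, host and address values invented), a minimal SMTP setup might be:
#
#   [email]
#   from = Jane Doe <jane@example.com>
#   method = smtp
#
#   [smtp]
#   host = mail.example.com
#   port = 587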
263 263
264 264 _charsets = mail._charsets(ui)
265 265
266 266 bundle = opts.get('bundle')
267 267 date = opts.get('date')
268 268 mbox = opts.get('mbox')
269 269 outgoing = opts.get('outgoing')
270 270 rev = opts.get('rev')
271 271 # internal option used by pbranches
272 272 patches = opts.get('patches')
273 273
274 274 def getoutgoing(dest, revs):
275 275 '''Return the revisions present locally but not in dest'''
276 276 url = ui.expandpath(dest or 'default-push', dest or 'default')
277 277 url = hg.parseurl(url)[0]
278 278 ui.status(_('comparing with %s\n') % util.hidepassword(url))
279 279
280 280 revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
281 281 if not revs:
282 282 revs = [len(repo) - 1]
283 283 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
284 284 if not revs:
285 285 ui.status(_("no changes found\n"))
286 286 return []
287 287 return [str(r) for r in revs]
288 288
289 289 def getpatches(revs):
290 290 for r in scmutil.revrange(repo, revs):
291 291 output = cStringIO.StringIO()
292 292 cmdutil.export(repo, [r], fp=output,
293 293 opts=patch.diffopts(ui, opts))
294 294 yield output.getvalue().split('\n')
295 295
296 296 def getbundle(dest):
297 297 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
298 298 tmpfn = os.path.join(tmpdir, 'bundle')
299 299 try:
300 300 commands.bundle(ui, repo, tmpfn, dest, **opts)
301 301 fp = open(tmpfn, 'rb')
302 302 data = fp.read()
303 303 fp.close()
304 304 return data
305 305 finally:
306 306 try:
307 307 os.unlink(tmpfn)
308 308 except OSError:
309 309 pass
310 310 os.rmdir(tmpdir)
311 311
312 312 if not (opts.get('test') or mbox):
313 313 # really sending
314 314 mail.validateconfig(ui)
315 315
316 316 if not (revs or rev or outgoing or bundle or patches):
317 317 raise util.Abort(_('specify at least one changeset with -r or -o'))
318 318
319 319 if outgoing and bundle:
320 320 raise util.Abort(_("--outgoing mode always on with --bundle;"
321 321 " do not re-specify --outgoing"))
322 322
323 323 if outgoing or bundle:
324 324 if len(revs) > 1:
325 325 raise util.Abort(_("too many destinations"))
326 326 dest = revs and revs[0] or None
327 327 revs = []
328 328
329 329 if rev:
330 330 if revs:
331 331 raise util.Abort(_('use only one form to specify the revision'))
332 332 revs = rev
333 333
334 334 if outgoing:
335 335 revs = getoutgoing(dest, rev)
336 336 if bundle:
337 337 opts['revs'] = revs
338 338
339 339 # start
340 340 if date:
341 341 start_time = util.parsedate(date)
342 342 else:
343 343 start_time = util.makedate()
344 344
345 345 def genmsgid(id):
346 346 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
347 347
348 348 def getdescription(body, sender):
349 349 if opts.get('desc'):
350 350 body = open(opts.get('desc')).read()
351 351 else:
352 352 ui.write(_('\nWrite the introductory message for the '
353 353 'patch series.\n\n'))
354 354 body = ui.edit(body, sender)
355 355 # Save series description in case sendmail fails
356 356 msgfile = repo.opener('last-email.txt', 'wb')
357 357 msgfile.write(body)
358 358 msgfile.close()
359 359 return body
360 360
361 361 def getpatchmsgs(patches, patchnames=None):
362 362 msgs = []
363 363
364 364 ui.write(_('this patch series consists of %d patches.\n\n')
365 365 % len(patches))
366 366
367 367 # build the intro message, or skip it if the user declines
368 368 if introwanted(opts, len(patches)):
369 369 msg = makeintro(patches)
370 370 if msg:
371 371 msgs.append(msg)
372 372
373 373 # are we going to send more than one message?
374 374 numbered = len(msgs) + len(patches) > 1
375 375
376 376 # now generate the actual patch messages
377 377 name = None
378 378 for i, p in enumerate(patches):
379 379 if patchnames:
380 380 name = patchnames[i]
381 381 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
382 382 len(patches), numbered, name)
383 383 msgs.append(msg)
384 384
385 385 return msgs
386 386
387 387 def makeintro(patches):
388 388 tlen = len(str(len(patches)))
389 389
390 390 flag = opts.get('flag') or ''
391 391 if flag:
392 392 flag = ' ' + ' '.join(flag)
393 393 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
394 394
395 395 subj = (opts.get('subject') or
396 396 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
397 397 if not subj:
398 398 return None # skip intro if the user doesn't bother
399 399
400 400 subj = prefix + ' ' + subj
401 401
402 402 body = ''
403 403 if opts.get('diffstat'):
404 404 # generate a cumulative diffstat of the whole patch series
405 405 diffstat = patch.diffstat(sum(patches, []))
406 406 body = '\n' + diffstat
407 407 else:
408 408 diffstat = None
409 409
410 410 body = getdescription(body, sender)
411 411 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
412 412 msg['Subject'] = mail.headencode(ui, subj, _charsets,
413 413 opts.get('test'))
414 414 return (msg, subj, diffstat)
415 415
416 416 def getbundlemsgs(bundle):
417 417 subj = (opts.get('subject')
418 418 or prompt(ui, 'Subject:', 'A bundle for your repository'))
419 419
420 420 body = getdescription('', sender)
421 421 msg = email.MIMEMultipart.MIMEMultipart()
422 422 if body:
423 423 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
424 424 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
425 425 datapart.set_payload(bundle)
426 426 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
427 427 datapart.add_header('Content-Disposition', 'attachment',
428 428 filename=bundlename)
429 429 email.Encoders.encode_base64(datapart)
430 430 msg.attach(datapart)
431 431 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
432 432 return [(msg, subj, None)]
433 433
434 434 sender = (opts.get('from') or ui.config('email', 'from') or
435 435 ui.config('patchbomb', 'from') or
436 436 prompt(ui, 'From', ui.username()))
437 437
438 438 if patches:
439 439 msgs = getpatchmsgs(patches, opts.get('patchnames'))
440 440 elif bundle:
441 441 msgs = getbundlemsgs(getbundle(dest))
442 442 else:
443 443 msgs = getpatchmsgs(list(getpatches(revs)))
444 444
445 445 showaddrs = []
446 446
447 447 def getaddrs(header, ask=False, default=None):
448 448 configkey = header.lower()
449 449 opt = header.replace('-', '_').lower()
450 450 addrs = opts.get(opt)
451 451 if addrs:
452 452 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
453 453 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
454 454
455 455 # not on the command line: fall back to config and then maybe ask
456 456 addr = (ui.config('email', configkey) or
457 457 ui.config('patchbomb', configkey) or
458 458 '')
459 459 if not addr and ask:
460 460 addr = prompt(ui, header, default=default)
461 461 if addr:
462 462 showaddrs.append('%s: %s' % (header, addr))
463 463 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
464 464 else:
465 465 return default
466 466
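# getaddrs() above falls back from command-line options to the 'email' and
# 'patchbomb' config sections before prompting, so recipients can also be
# pinned in hgrc -- an illustrative sketch (addresses are placeholders):
#
#   [email]
#   to = dev-list@example.com
#   cc = reviewer@example.com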
467 467 to = getaddrs('To', ask=True)
468 468 if not to:
469 469 # we can get here in non-interactive mode
470 470 raise util.Abort(_('no recipient addresses provided'))
471 471 cc = getaddrs('Cc', ask=True, default='') or []
472 472 bcc = getaddrs('Bcc') or []
473 473 replyto = getaddrs('Reply-To')
474 474
475 475 if opts.get('diffstat') or opts.get('confirm'):
476 476 ui.write(_('\nFinal summary:\n\n'))
477 477 ui.write(('From: %s\n' % sender))
478 478 for addr in showaddrs:
479 479 ui.write('%s\n' % addr)
480 480 for m, subj, ds in msgs:
481 481 ui.write(('Subject: %s\n' % subj))
482 482 if ds:
483 483 ui.write(ds)
484 484 ui.write('\n')
485 if ui.promptchoice(_('are you sure you want to send (yn)?'),
486 (_('&Yes'), _('&No'))):
485 if ui.promptchoice(_('are you sure you want to send (yn)?'
486 '$$ &Yes $$ &No')):
487 487 raise util.Abort(_('patchbomb canceled'))
488 488
489 489 ui.write('\n')
490 490
491 491 parent = opts.get('in_reply_to') or None
492 492 # angle brackets may be omitted; they're not semantically part of the msg-id
493 493 if parent is not None:
494 494 if not parent.startswith('<'):
495 495 parent = '<' + parent
496 496 if not parent.endswith('>'):
497 497 parent += '>'
498 498
499 499 sender_addr = email.Utils.parseaddr(sender)[1]
500 500 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
501 501 sendmail = None
502 502 for i, (m, subj, ds) in enumerate(msgs):
503 503 try:
504 504 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
505 505 except TypeError:
506 506 m['Message-Id'] = genmsgid('patchbomb')
507 507 if parent:
508 508 m['In-Reply-To'] = parent
509 509 m['References'] = parent
510 510 if not parent or 'X-Mercurial-Node' not in m:
511 511 parent = m['Message-Id']
512 512
513 513 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
514 514 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
515 515
516 516 start_time = (start_time[0] + 1, start_time[1])
517 517 m['From'] = sender
518 518 m['To'] = ', '.join(to)
519 519 if cc:
520 520 m['Cc'] = ', '.join(cc)
521 521 if bcc:
522 522 m['Bcc'] = ', '.join(bcc)
523 523 if replyto:
524 524 m['Reply-To'] = ', '.join(replyto)
525 525 if opts.get('test'):
526 526 ui.status(_('displaying '), subj, ' ...\n')
527 527 ui.flush()
528 528 if 'PAGER' in os.environ and not ui.plain():
529 529 fp = util.popen(os.environ['PAGER'], 'w')
530 530 else:
531 531 fp = ui
532 532 generator = email.Generator.Generator(fp, mangle_from_=False)
533 533 try:
534 534 generator.flatten(m, 0)
535 535 fp.write('\n')
536 536 except IOError, inst:
537 537 if inst.errno != errno.EPIPE:
538 538 raise
539 539 if fp is not ui:
540 540 fp.close()
541 541 else:
542 542 if not sendmail:
543 543 verifycert = ui.config('smtp', 'verifycert')
544 544 if opts.get('insecure'):
545 545 ui.setconfig('smtp', 'verifycert', 'loose')
546 546 try:
547 547 sendmail = mail.connect(ui, mbox=mbox)
548 548 finally:
549 549 ui.setconfig('smtp', 'verifycert', verifycert)
550 550 ui.status(_('sending '), subj, ' ...\n')
551 551 ui.progress(_('sending'), i, item=subj, total=len(msgs))
552 552 if not mbox:
553 553 # Exim does not remove the Bcc field
554 554 del m['Bcc']
555 555 fp = cStringIO.StringIO()
556 556 generator = email.Generator.Generator(fp, mangle_from_=False)
557 557 generator.flatten(m, 0)
558 558 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
559 559
560 560 ui.progress(_('writing'), None)
561 561 ui.progress(_('sending'), None)
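The pair of hunks in the confirmation block above shows the pattern this
changeset applies throughout: the choice labels no longer travel as a separate
tuple but are appended to the prompt text itself, separated by '$$', with '&'
still marking each choice's response key and promptchoice() returning the
selected index. A condensed before/after sketch of that call (context elided):

    # before: prompt text and choices passed as two arguments
    if ui.promptchoice(_('are you sure you want to send (yn)?'),
                       (_('&Yes'), _('&No'))):
        raise util.Abort(_('patchbomb canceled'))

    # after: the choices ride along in the same translatable string;
    # 0 (&Yes) remains the implicit default
    if ui.promptchoice(_('are you sure you want to send (yn)?'
                         '$$ &Yes $$ &No')):
        raise util.Abort(_('patchbomb canceled'))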
@@ -1,680 +1,680 @@
1 1 # record.py
2 2 #
3 3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''commands to interactively select changes for commit/qrefresh'''
9 9
10 10 from mercurial.i18n import gettext, _
11 11 from mercurial import cmdutil, commands, extensions, hg, patch
12 12 from mercurial import util
13 13 import copy, cStringIO, errno, os, re, shutil, tempfile
14 14
15 15 cmdtable = {}
16 16 command = cmdutil.command(cmdtable)
17 17 testedwith = 'internal'
18 18
19 19 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
20 20
21 21 diffopts = [
22 22 ('w', 'ignore-all-space', False,
23 23 _('ignore white space when comparing lines')),
24 24 ('b', 'ignore-space-change', None,
25 25 _('ignore changes in the amount of white space')),
26 26 ('B', 'ignore-blank-lines', None,
27 27 _('ignore changes whose lines are all blank')),
28 28 ]
29 29
30 30 def scanpatch(fp):
31 31 """like patch.iterhunks, but yield different events
32 32
33 33 - ('file', [header_lines + fromfile + tofile])
34 34 - ('context', [context_lines])
35 35 - ('hunk', [hunk_lines])
36 36 - ('range', (-start,len, +start,len, proc))
37 37 """
38 38 lr = patch.linereader(fp)
39 39
40 40 def scanwhile(first, p):
41 41 """scan lr while predicate holds"""
42 42 lines = [first]
43 43 while True:
44 44 line = lr.readline()
45 45 if not line:
46 46 break
47 47 if p(line):
48 48 lines.append(line)
49 49 else:
50 50 lr.push(line)
51 51 break
52 52 return lines
53 53
54 54 while True:
55 55 line = lr.readline()
56 56 if not line:
57 57 break
58 58 if line.startswith('diff --git a/') or line.startswith('diff -r '):
59 59 def notheader(line):
60 60 s = line.split(None, 1)
61 61 return not s or s[0] not in ('---', 'diff')
62 62 header = scanwhile(line, notheader)
63 63 fromfile = lr.readline()
64 64 if fromfile.startswith('---'):
65 65 tofile = lr.readline()
66 66 header += [fromfile, tofile]
67 67 else:
68 68 lr.push(fromfile)
69 69 yield 'file', header
70 70 elif line[0] == ' ':
71 71 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
72 72 elif line[0] in '-+':
73 73 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
74 74 else:
75 75 m = lines_re.match(line)
76 76 if m:
77 77 yield 'range', m.groups()
78 78 else:
79 79 yield 'other', line
80 80
81 81 class header(object):
82 82 """patch header
83 83
84 84 XXX shouldn't we move this to mercurial/patch.py ?
85 85 """
86 86 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
87 87 diff_re = re.compile('diff -r .* (.*)$')
88 88 allhunks_re = re.compile('(?:index|new file|deleted file) ')
89 89 pretty_re = re.compile('(?:new file|deleted file) ')
90 90 special_re = re.compile('(?:index|new|deleted|copy|rename) ')
91 91
92 92 def __init__(self, header):
93 93 self.header = header
94 94 self.hunks = []
95 95
96 96 def binary(self):
97 97 return util.any(h.startswith('index ') for h in self.header)
98 98
99 99 def pretty(self, fp):
100 100 for h in self.header:
101 101 if h.startswith('index '):
102 102 fp.write(_('this modifies a binary file (all or nothing)\n'))
103 103 break
104 104 if self.pretty_re.match(h):
105 105 fp.write(h)
106 106 if self.binary():
107 107 fp.write(_('this is a binary file\n'))
108 108 break
109 109 if h.startswith('---'):
110 110 fp.write(_('%d hunks, %d lines changed\n') %
111 111 (len(self.hunks),
112 112 sum([max(h.added, h.removed) for h in self.hunks])))
113 113 break
114 114 fp.write(h)
115 115
116 116 def write(self, fp):
117 117 fp.write(''.join(self.header))
118 118
119 119 def allhunks(self):
120 120 return util.any(self.allhunks_re.match(h) for h in self.header)
121 121
122 122 def files(self):
123 123 match = self.diffgit_re.match(self.header[0])
124 124 if match:
125 125 fromfile, tofile = match.groups()
126 126 if fromfile == tofile:
127 127 return [fromfile]
128 128 return [fromfile, tofile]
129 129 else:
130 130 return self.diff_re.match(self.header[0]).groups()
131 131
132 132 def filename(self):
133 133 return self.files()[-1]
134 134
135 135 def __repr__(self):
136 136 return '<header %s>' % (' '.join(map(repr, self.files())))
137 137
138 138 def special(self):
139 139 return util.any(self.special_re.match(h) for h in self.header)
140 140
141 141 def countchanges(hunk):
142 142 """hunk -> (n+,n-)"""
143 143 add = len([h for h in hunk if h[0] == '+'])
144 144 rem = len([h for h in hunk if h[0] == '-'])
145 145 return add, rem
146 146
147 147 class hunk(object):
148 148 """patch hunk
149 149
150 150 XXX shouldn't we merge this with patch.hunk ?
151 151 """
152 152 maxcontext = 3
153 153
154 154 def __init__(self, header, fromline, toline, proc, before, hunk, after):
155 155 def trimcontext(number, lines):
156 156 delta = len(lines) - self.maxcontext
157 157 if False and delta > 0:
158 158 return number + delta, lines[:self.maxcontext]
159 159 return number, lines
160 160
161 161 self.header = header
162 162 self.fromline, self.before = trimcontext(fromline, before)
163 163 self.toline, self.after = trimcontext(toline, after)
164 164 self.proc = proc
165 165 self.hunk = hunk
166 166 self.added, self.removed = countchanges(self.hunk)
167 167
168 168 def write(self, fp):
169 169 delta = len(self.before) + len(self.after)
170 170 if self.after and self.after[-1] == '\\ No newline at end of file\n':
171 171 delta -= 1
172 172 fromlen = delta + self.removed
173 173 tolen = delta + self.added
174 174 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
175 175 (self.fromline, fromlen, self.toline, tolen,
176 176 self.proc and (' ' + self.proc)))
177 177 fp.write(''.join(self.before + self.hunk + self.after))
178 178
179 179 pretty = write
180 180
181 181 def filename(self):
182 182 return self.header.filename()
183 183
184 184 def __repr__(self):
185 185 return '<hunk %r@%d>' % (self.filename(), self.fromline)
186 186
187 187 def parsepatch(fp):
188 188 """patch -> [] of headers -> [] of hunks """
189 189 class parser(object):
190 190 """patch parsing state machine"""
191 191 def __init__(self):
192 192 self.fromline = 0
193 193 self.toline = 0
194 194 self.proc = ''
195 195 self.header = None
196 196 self.context = []
197 197 self.before = []
198 198 self.hunk = []
199 199 self.headers = []
200 200
201 201 def addrange(self, limits):
202 202 fromstart, fromend, tostart, toend, proc = limits
203 203 self.fromline = int(fromstart)
204 204 self.toline = int(tostart)
205 205 self.proc = proc
206 206
207 207 def addcontext(self, context):
208 208 if self.hunk:
209 209 h = hunk(self.header, self.fromline, self.toline, self.proc,
210 210 self.before, self.hunk, context)
211 211 self.header.hunks.append(h)
212 212 self.fromline += len(self.before) + h.removed
213 213 self.toline += len(self.before) + h.added
214 214 self.before = []
215 215 self.hunk = []
216 216 self.proc = ''
217 217 self.context = context
218 218
219 219 def addhunk(self, hunk):
220 220 if self.context:
221 221 self.before = self.context
222 222 self.context = []
223 223 self.hunk = hunk
224 224
225 225 def newfile(self, hdr):
226 226 self.addcontext([])
227 227 h = header(hdr)
228 228 self.headers.append(h)
229 229 self.header = h
230 230
231 231 def addother(self, line):
232 232 pass # 'other' lines are ignored
233 233
234 234 def finished(self):
235 235 self.addcontext([])
236 236 return self.headers
237 237
238 238 transitions = {
239 239 'file': {'context': addcontext,
240 240 'file': newfile,
241 241 'hunk': addhunk,
242 242 'range': addrange},
243 243 'context': {'file': newfile,
244 244 'hunk': addhunk,
245 245 'range': addrange,
246 246 'other': addother},
247 247 'hunk': {'context': addcontext,
248 248 'file': newfile,
249 249 'range': addrange},
250 250 'range': {'context': addcontext,
251 251 'hunk': addhunk},
252 252 'other': {'other': addother},
253 253 }
254 254
255 255 p = parser()
256 256
257 257 state = 'context'
258 258 for newstate, data in scanpatch(fp):
259 259 try:
260 260 p.transitions[state][newstate](p, data)
261 261 except KeyError:
262 262 raise patch.PatchError('unhandled transition: %s -> %s' %
263 263 (state, newstate))
264 264 state = newstate
265 265 return p.finished()
266 266
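# A small worked example of the scanpatch/parsepatch pipeline above, as a
# sketch (the sample diff text is invented, but follows the git-style shape
# the scanner looks for):
#
#   import cStringIO
#   sample = ("diff --git a/foo.txt b/foo.txt\n"
#             "--- a/foo.txt\n"
#             "+++ b/foo.txt\n"
#             "@@ -1,2 +1,2 @@\n"
#             " unchanged line\n"
#             "-old line\n"
#             "+new line\n")
#   headers = parsepatch(cStringIO.StringIO(sample))
#   for h in headers:
#       print h.filename(), len(h.hunks)   # -> foo.txt 1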
267 267 def filterpatch(ui, headers):
268 268 """Interactively filter patch chunks into applied-only chunks"""
269 269
270 270 def prompt(skipfile, skipall, query, chunk):
271 271 """prompt query, and process base inputs
272 272
273 273 - y/n for the rest of this file
274 274 - y/n for the rest of all files
275 275 - ? (help)
276 276 - q (quit)
277 277
278 278 Return True/False and possibly updated skipfile and skipall.
279 279 """
280 280 newpatches = None
281 281 if skipall is not None:
282 282 return skipall, skipfile, skipall, newpatches
283 283 if skipfile is not None:
284 284 return skipfile, skipfile, skipall, newpatches
285 285 while True:
286 resps = _('[Ynesfdaq?]')
287 choices = (_('&Yes, record this change'),
288 _('&No, skip this change'),
289 _('&Edit the change manually'),
290 _('&Skip remaining changes to this file'),
291 _('Record remaining changes to this &file'),
292 _('&Done, skip remaining changes and files'),
293 _('Record &all changes to all remaining files'),
294 _('&Quit, recording no changes'),
295 _('&?'))
296 r = ui.promptchoice("%s %s" % (query, resps), choices)
286 resps = _('[Ynesfdaq?]'
287 '$$ &Yes, record this change'
288 '$$ &No, skip this change'
289 '$$ &Edit the change manually'
290 '$$ &Skip remaining changes to this file'
291 '$$ Record remaining changes to this &file'
292 '$$ &Done, skip remaining changes and files'
293 '$$ Record &all changes to all remaining files'
294 '$$ &Quit, recording no changes'
295 '$$ &?')
296 r = ui.promptchoice("%s %s" % (query, resps))
297 297 ui.write("\n")
298 298 if r == 8: # ?
299 299 doc = gettext(record.__doc__)
300 300 c = doc.find('::') + 2
301 301 for l in doc[c:].splitlines():
302 302 if l.startswith(' '):
303 303 ui.write(l.strip(), '\n')
304 304 continue
305 305 elif r == 0: # yes
306 306 ret = True
307 307 elif r == 1: # no
308 308 ret = False
309 309 elif r == 2: # Edit patch
310 310 if chunk is None:
311 311 ui.write(_('cannot edit patch for whole file'))
312 312 ui.write("\n")
313 313 continue
314 314 if chunk.header.binary():
315 315 ui.write(_('cannot edit patch for binary file'))
316 316 ui.write("\n")
317 317 continue
318 318 # Patch comment based on the Git one (based on comment at end of
319 319 # http://mercurial.selenic.com/wiki/RecordExtension)
320 320 phelp = '---' + _("""
321 321 To remove '-' lines, make them ' ' lines (context).
322 322 To remove '+' lines, delete them.
323 323 Lines starting with # will be removed from the patch.
324 324
325 325 If the patch applies cleanly, the edited hunk will immediately be
326 326 added to the record list. If it does not apply cleanly, a rejects
327 327 file will be generated: you can use that when you try again. If
328 328 all lines of the hunk are removed, then the edit is aborted and
329 329 the hunk is left unchanged.
330 330 """)
331 331 (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
332 332 suffix=".diff", text=True)
333 333 ncpatchfp = None
334 334 try:
335 335 # Write the initial patch
336 336 f = os.fdopen(patchfd, "w")
337 337 chunk.header.write(f)
338 338 chunk.write(f)
339 339 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
340 340 f.close()
341 341 # Start the editor and wait for it to complete
342 342 editor = ui.geteditor()
343 343 util.system("%s \"%s\"" % (editor, patchfn),
344 344 environ={'HGUSER': ui.username()},
345 345 onerr=util.Abort, errprefix=_("edit failed"),
346 346 out=ui.fout)
347 347 # Remove comment lines
348 348 patchfp = open(patchfn)
349 349 ncpatchfp = cStringIO.StringIO()
350 350 for line in patchfp:
351 351 if not line.startswith('#'):
352 352 ncpatchfp.write(line)
353 353 patchfp.close()
354 354 ncpatchfp.seek(0)
355 355 newpatches = parsepatch(ncpatchfp)
356 356 finally:
357 357 os.unlink(patchfn)
358 358 del ncpatchfp
359 359 # Signal that the chunk shouldn't be applied as-is, but
360 360 # provide the new patch to be used instead.
361 361 ret = False
362 362 elif r == 3: # Skip
363 363 ret = skipfile = False
364 364 elif r == 4: # file (Record remaining)
365 365 ret = skipfile = True
366 366 elif r == 5: # done, skip remaining
367 367 ret = skipall = False
368 368 elif r == 6: # all
369 369 ret = skipall = True
370 370 elif r == 7: # quit
371 371 raise util.Abort(_('user quit'))
372 372 return ret, skipfile, skipall, newpatches
373 373
374 374 seen = set()
375 375 applied = {} # 'filename' -> [] of chunks
376 376 skipfile, skipall = None, None
377 377 pos, total = 1, sum(len(h.hunks) for h in headers)
378 378 for h in headers:
379 379 pos += len(h.hunks)
380 380 skipfile = None
381 381 fixoffset = 0
382 382 hdr = ''.join(h.header)
383 383 if hdr in seen:
384 384 continue
385 385 seen.add(hdr)
386 386 if skipall is None:
387 387 h.pretty(ui)
388 388 msg = (_('examine changes to %s?') %
389 389 _(' and ').join("'%s'" % f for f in h.files()))
390 390 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
391 391 if not r:
392 392 continue
393 393 applied[h.filename()] = [h]
394 394 if h.allhunks():
395 395 applied[h.filename()] += h.hunks
396 396 continue
397 397 for i, chunk in enumerate(h.hunks):
398 398 if skipfile is None and skipall is None:
399 399 chunk.pretty(ui)
400 400 if total == 1:
401 401 msg = _("record this change to '%s'?") % chunk.filename()
402 402 else:
403 403 idx = pos - len(h.hunks) + i
404 404 msg = _("record change %d/%d to '%s'?") % (idx, total,
405 405 chunk.filename())
406 406 r, skipfile, skipall, newpatches = prompt(skipfile,
407 407 skipall, msg, chunk)
408 408 if r:
409 409 if fixoffset:
410 410 chunk = copy.copy(chunk)
411 411 chunk.toline += fixoffset
412 412 applied[chunk.filename()].append(chunk)
413 413 elif newpatches is not None:
414 414 for newpatch in newpatches:
415 415 for newhunk in newpatch.hunks:
416 416 if fixoffset:
417 417 newhunk.toline += fixoffset
418 418 applied[newhunk.filename()].append(newhunk)
419 419 else:
420 420 fixoffset += chunk.removed - chunk.added
421 421 return sum([h for h in applied.itervalues()
422 422 if h[0].special() or len(h) > 1], [])
423 423
424 424 @command("record",
425 425 # same options as commit + white space diff options
426 426 commands.table['^commit|ci'][1][:] + diffopts,
427 427 _('hg record [OPTION]... [FILE]...'))
428 428 def record(ui, repo, *pats, **opts):
429 429 '''interactively select changes to commit
430 430
431 431 If a list of files is omitted, all changes reported by :hg:`status`
432 432 will be candidates for recording.
433 433
434 434 See :hg:`help dates` for a list of formats valid for -d/--date.
435 435
436 436 You will be prompted for whether to record changes to each
437 437 modified file, and for files with multiple changes, for each
438 438 change to use. For each query, the following responses are
439 439 possible::
440 440
441 441 y - record this change
442 442 n - skip this change
443 443 e - edit this change manually
444 444
445 445 s - skip remaining changes to this file
446 446 f - record remaining changes to this file
447 447
448 448 d - done, skip remaining changes and files
449 449 a - record all changes to all remaining files
450 450 q - quit, recording no changes
451 451
452 452 ? - display help
453 453
454 454 This command is not available when committing a merge.'''
455 455
456 456 dorecord(ui, repo, commands.commit, 'commit', False, *pats, **opts)
457 457
458 458 def qrefresh(origfn, ui, repo, *pats, **opts):
459 459 if not opts['interactive']:
460 460 return origfn(ui, repo, *pats, **opts)
461 461
462 462 mq = extensions.find('mq')
463 463
464 464 def committomq(ui, repo, *pats, **opts):
465 465 # At this point the working copy contains only changes that
466 466 # were accepted. All other changes were reverted.
467 467 # We can't pass *pats here since qrefresh will undo all other
468 468 # changed files in the patch that aren't in pats.
469 469 mq.refresh(ui, repo, **opts)
470 470
471 471 # backup all changed files
472 472 dorecord(ui, repo, committomq, 'qrefresh', True, *pats, **opts)
473 473
474 474 def qrecord(ui, repo, patch, *pats, **opts):
475 475 '''interactively record a new patch
476 476
477 477 See :hg:`help qnew` & :hg:`help record` for more information and
478 478 usage.
479 479 '''
480 480
481 481 try:
482 482 mq = extensions.find('mq')
483 483 except KeyError:
484 484 raise util.Abort(_("'mq' extension not loaded"))
485 485
486 486 repo.mq.checkpatchname(patch)
487 487
488 488 def committomq(ui, repo, *pats, **opts):
489 489 opts['checkname'] = False
490 490 mq.new(ui, repo, patch, *pats, **opts)
491 491
492 492 dorecord(ui, repo, committomq, 'qnew', False, *pats, **opts)
493 493
494 494 def qnew(origfn, ui, repo, patch, *args, **opts):
495 495 if opts['interactive']:
496 496 return qrecord(ui, repo, patch, *args, **opts)
497 497 return origfn(ui, repo, patch, *args, **opts)
498 498
499 499 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, *pats, **opts):
500 500 if not ui.interactive():
501 501 raise util.Abort(_('running non-interactively, use %s instead') %
502 502 cmdsuggest)
503 503
504 504 # make sure username is set before going interactive
505 505 ui.username()
506 506
507 507 def recordfunc(ui, repo, message, match, opts):
508 508 """This is generic record driver.
509 509
510 510 Its job is to interactively filter local changes, and
511 511 accordingly prepare the working directory into a state in which the
512 512 job can be delegated to a non-interactive commit command such as
513 513 'commit' or 'qrefresh'.
514 514
515 515 After the actual job is done by non-interactive command, the
516 516 working directory is restored to its original state.
517 517
518 518 In the end we'll record interesting changes, and everything else
519 519 will be left in place, so the user can continue working.
520 520 """
521 521
522 522 merge = len(repo[None].parents()) > 1
523 523 if merge:
524 524 raise util.Abort(_('cannot partially commit a merge '
525 525 '(use "hg commit" instead)'))
526 526
527 527 changes = repo.status(match=match)[:3]
528 528 diffopts = patch.diffopts(ui, opts=dict(
529 529 git=True, nodates=True,
530 530 ignorews=opts.get('ignore_all_space'),
531 531 ignorewsamount=opts.get('ignore_space_change'),
532 532 ignoreblanklines=opts.get('ignore_blank_lines')))
533 533 chunks = patch.diff(repo, changes=changes, opts=diffopts)
534 534 fp = cStringIO.StringIO()
535 535 fp.write(''.join(chunks))
536 536 fp.seek(0)
537 537
538 538 # 1. filter the patch, so we have the intending-to-apply subset of it
539 539 try:
540 540 chunks = filterpatch(ui, parsepatch(fp))
541 541 except patch.PatchError, err:
542 542 raise util.Abort(_('error parsing patch: %s') % err)
543 543
544 544 del fp
545 545
546 546 contenders = set()
547 547 for h in chunks:
548 548 try:
549 549 contenders.update(set(h.files()))
550 550 except AttributeError:
551 551 pass
552 552
553 553 changed = changes[0] + changes[1] + changes[2]
554 554 newfiles = [f for f in changed if f in contenders]
555 555 if not newfiles:
556 556 ui.status(_('no changes to record\n'))
557 557 return 0
558 558
559 559 modified = set(changes[0])
560 560
561 561 # 2. backup changed files, so we can restore them in the end
562 562 if backupall:
563 563 tobackup = changed
564 564 else:
565 565 tobackup = [f for f in newfiles if f in modified]
566 566
567 567 backups = {}
568 568 if tobackup:
569 569 backupdir = repo.join('record-backups')
570 570 try:
571 571 os.mkdir(backupdir)
572 572 except OSError, err:
573 573 if err.errno != errno.EEXIST:
574 574 raise
575 575 try:
576 576 # backup continues
577 577 for f in tobackup:
578 578 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
579 579 dir=backupdir)
580 580 os.close(fd)
581 581 ui.debug('backup %r as %r\n' % (f, tmpname))
582 582 util.copyfile(repo.wjoin(f), tmpname)
583 583 shutil.copystat(repo.wjoin(f), tmpname)
584 584 backups[f] = tmpname
585 585
586 586 fp = cStringIO.StringIO()
587 587 for c in chunks:
588 588 if c.filename() in backups:
589 589 c.write(fp)
590 590 dopatch = fp.tell()
591 591 fp.seek(0)
592 592
593 593 # 3a. apply filtered patch to clean repo (clean)
594 594 if backups:
595 595 hg.revert(repo, repo.dirstate.p1(),
596 596 lambda key: key in backups)
597 597
598 598 # 3b. (apply)
599 599 if dopatch:
600 600 try:
601 601 ui.debug('applying patch\n')
602 602 ui.debug(fp.getvalue())
603 603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
604 604 except patch.PatchError, err:
605 605 raise util.Abort(str(err))
606 606 del fp
607 607
608 608 # 4. We prepared the working directory according to the filtered
609 609 # patch. Now is the time to delegate the job to
610 610 # commit/qrefresh or the like!
611 611
612 612 # it is important to first chdir to repo root -- we'll call
613 613 # a highlevel command with list of pathnames relative to
614 614 # repo root
615 615 cwd = os.getcwd()
616 616 os.chdir(repo.root)
617 617 try:
618 618 commitfunc(ui, repo, *newfiles, **opts)
619 619 finally:
620 620 os.chdir(cwd)
621 621
622 622 return 0
623 623 finally:
624 624 # 5. finally restore backed-up files
625 625 try:
626 626 for realname, tmpname in backups.iteritems():
627 627 ui.debug('restoring %r to %r\n' % (tmpname, realname))
628 628 util.copyfile(tmpname, repo.wjoin(realname))
629 629 # Our calls to copystat() here and above are a
630 630 # hack to trick any editors that have f open into
631 631 # thinking we haven't modified them.
632 632 #
633 633 # Also note that this is racy, as an editor could
634 634 # notice the file's mtime before we've finished
635 635 # writing it.
636 636 shutil.copystat(tmpname, repo.wjoin(realname))
637 637 os.unlink(tmpname)
638 638 if tobackup:
639 639 os.rmdir(backupdir)
640 640 except OSError:
641 641 pass
642 642
643 643 # wrap ui.write so diff output can be labeled/colorized
644 644 def wrapwrite(orig, *args, **kw):
645 645 label = kw.pop('label', '')
646 646 for chunk, l in patch.difflabel(lambda: args):
647 647 orig(chunk, label=label + l)
648 648 oldwrite = ui.write
649 649 extensions.wrapfunction(ui, 'write', wrapwrite)
650 650 try:
651 651 return cmdutil.commit(ui, repo, recordfunc, pats, opts)
652 652 finally:
653 653 ui.write = oldwrite
654 654
655 655 cmdtable["qrecord"] = \
656 656 (qrecord, [], # placeholder until mq is available
657 657 _('hg qrecord [OPTION]... PATCH [FILE]...'))
658 658
659 659 def uisetup(ui):
660 660 try:
661 661 mq = extensions.find('mq')
662 662 except KeyError:
663 663 return
664 664
665 665 cmdtable["qrecord"] = \
666 666 (qrecord,
667 667 # same options as qnew, but copy them so we don't get
668 668 # -i/--interactive for qrecord and add white space diff options
669 669 mq.cmdtable['^qnew'][1][:] + diffopts,
670 670 _('hg qrecord [OPTION]... PATCH [FILE]...'))
671 671
672 672 _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
673 673 _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
674 674 _("interactively select changes to refresh"))
675 675
676 676 def _wrapcmd(cmd, table, wrapfn, msg):
677 677 entry = extensions.wrapcommand(table, cmd, wrapfn)
678 678 entry[1].append(('i', 'interactive', None, msg))
679 679
680 680 commands.inferrepo += " record qrecord"
@@ -1,377 +1,377 @@
1 1 # filemerge.py - file-level merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import short
9 9 from i18n import _
10 10 import util, simplemerge, match, error
11 11 import os, tempfile, re, filecmp
12 12
13 13 def _toolstr(ui, tool, part, default=""):
14 14 return ui.config("merge-tools", tool + "." + part, default)
15 15
16 16 def _toolbool(ui, tool, part, default=False):
17 17 return ui.configbool("merge-tools", tool + "." + part, default)
18 18
19 19 def _toollist(ui, tool, part, default=[]):
20 20 return ui.configlist("merge-tools", tool + "." + part, default)
21 21
22 22 internals = {}
23 23
24 24 def internaltool(name, trymerge, onfailure=None):
25 25 '''return a decorator for populating internal merge tool table'''
26 26 def decorator(func):
27 27 fullname = 'internal:' + name
28 28 func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip()
29 29 internals[fullname] = func
30 30 func.trymerge = trymerge
31 31 func.onfailure = onfailure
32 32 return func
33 33 return decorator
34 34
35 35 def _findtool(ui, tool):
36 36 if tool in internals:
37 37 return tool
38 38 for kn in ("regkey", "regkeyalt"):
39 39 k = _toolstr(ui, tool, kn)
40 40 if not k:
41 41 continue
42 42 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
43 43 if p:
44 44 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
45 45 if p:
46 46 return p
47 47 exe = _toolstr(ui, tool, "executable", tool)
48 48 return util.findexe(util.expandpath(exe))
49 49
50 50 def _picktool(repo, ui, path, binary, symlink):
51 51 def check(tool, pat, symlink, binary):
52 52 tmsg = tool
53 53 if pat:
54 54 tmsg += " specified for " + pat
55 55 if not _findtool(ui, tool):
56 56 if pat: # explicitly requested tool deserves a warning
57 57 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
58 58 else: # configured but non-existing tools are more silent
59 59 ui.note(_("couldn't find merge tool %s\n") % tmsg)
60 60 elif symlink and not _toolbool(ui, tool, "symlink"):
61 61 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
62 62 elif binary and not _toolbool(ui, tool, "binary"):
63 63 ui.warn(_("tool %s can't handle binary\n") % tmsg)
64 64 elif not util.gui() and _toolbool(ui, tool, "gui"):
65 65 ui.warn(_("tool %s requires a GUI\n") % tmsg)
66 66 else:
67 67 return True
68 68 return False
69 69
70 70 # forcemerge comes from command line arguments, highest priority
71 71 force = ui.config('ui', 'forcemerge')
72 72 if force:
73 73 toolpath = _findtool(ui, force)
74 74 if toolpath:
75 75 return (force, util.shellquote(toolpath))
76 76 else:
77 77 # mimic HGMERGE if given tool not found
78 78 return (force, force)
79 79
80 80 # HGMERGE takes next precedence
81 81 hgmerge = os.environ.get("HGMERGE")
82 82 if hgmerge:
83 83 return (hgmerge, hgmerge)
84 84
85 85 # then patterns
86 86 for pat, tool in ui.configitems("merge-patterns"):
87 87 mf = match.match(repo.root, '', [pat])
88 88 if mf(path) and check(tool, pat, symlink, False):
89 89 toolpath = _findtool(ui, tool)
90 90 return (tool, util.shellquote(toolpath))
91 91
92 92 # then merge tools
93 93 tools = {}
94 94 for k, v in ui.configitems("merge-tools"):
95 95 t = k.split('.')[0]
96 96 if t not in tools:
97 97 tools[t] = int(_toolstr(ui, t, "priority", "0"))
98 98 names = tools.keys()
99 99 tools = sorted([(-p, t) for t, p in tools.items()])
100 100 uimerge = ui.config("ui", "merge")
101 101 if uimerge:
102 102 if uimerge not in names:
103 103 return (uimerge, uimerge)
104 104 tools.insert(0, (None, uimerge)) # highest priority
105 105 tools.append((None, "hgmerge")) # the old default, if found
106 106 for p, t in tools:
107 107 if check(t, None, symlink, binary):
108 108 toolpath = _findtool(ui, t)
109 109 return (t, util.shellquote(toolpath))
110 110
111 111 # internal merge or prompt as last resort
112 112 if symlink or binary:
113 113 return "internal:prompt", None
114 114 return "internal:merge", None
115 115
116 116 def _eoltype(data):
117 117 "Guess the EOL type of a file"
118 118 if '\0' in data: # binary
119 119 return None
120 120 if '\r\n' in data: # Windows
121 121 return '\r\n'
122 122 if '\r' in data: # Old Mac
123 123 return '\r'
124 124 if '\n' in data: # UNIX
125 125 return '\n'
126 126 return None # unknown
127 127
128 128 def _matcheol(file, origfile):
129 129 "Convert EOL markers in a file to match origfile"
130 130 tostyle = _eoltype(util.readfile(origfile))
131 131 if tostyle:
132 132 data = util.readfile(file)
133 133 style = _eoltype(data)
134 134 if style:
135 135 newdata = data.replace(style, tostyle)
136 136 if newdata != data:
137 137 util.writefile(file, newdata)
138 138
139 139 @internaltool('prompt', False)
140 140 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf):
141 141 """Asks the user which of the local or the other version to keep as
142 142 the merged version."""
143 143 ui = repo.ui
144 144 fd = fcd.path()
145 145
146 146 if ui.promptchoice(_(" no tool found to merge %s\n"
147 "keep (l)ocal or take (o)ther?") % fd,
148 (_("&Local"), _("&Other")), 0):
147 "keep (l)ocal or take (o)ther?"
148 "$$ &Local $$ &Other") % fd, 0):
149 149 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
150 150 else:
151 151 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
152 152
153 153 @internaltool('local', False)
154 154 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf):
155 155 """Uses the local version of files as the merged version."""
156 156 return 0
157 157
158 158 @internaltool('other', False)
159 159 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf):
160 160 """Uses the other version of files as the merged version."""
161 161 repo.wwrite(fcd.path(), fco.data(), fco.flags())
162 162 return 0
163 163
164 164 @internaltool('fail', False)
165 165 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
166 166 """
167 167 Rather than attempting to merge files that were modified on both
168 168 branches, it marks them as unresolved. The resolve command must be
169 169 used to resolve these conflicts."""
170 170 return 1
171 171
172 172 def _premerge(repo, toolconf, files):
173 173 tool, toolpath, binary, symlink = toolconf
174 174 if symlink:
175 175 return 1
176 176 a, b, c, back = files
177 177
178 178 ui = repo.ui
179 179
180 180 # do we attempt to simplemerge first?
181 181 try:
182 182 premerge = _toolbool(ui, tool, "premerge", not binary)
183 183 except error.ConfigError:
184 184 premerge = _toolstr(ui, tool, "premerge").lower()
185 185 valid = 'keep'.split()
186 186 if premerge not in valid:
187 187 _valid = ', '.join(["'" + v + "'" for v in valid])
188 188 raise error.ConfigError(_("%s.premerge not valid "
189 189 "('%s' is neither boolean nor %s)") %
190 190 (tool, premerge, _valid))
191 191
192 192 if premerge:
193 193 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
194 194 if not r:
195 195 ui.debug(" premerge successful\n")
196 196 return 0
197 197 if premerge != 'keep':
198 198 util.copyfile(back, a) # restore from backup and try again
199 199 return 1 # continue merging
200 200
201 201 @internaltool('merge', True,
202 202 _("merging %s incomplete! "
203 203 "(edit conflicts, then use 'hg resolve --mark')\n"))
204 204 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
205 205 """
206 206 Uses the internal non-interactive simple merge algorithm for merging
207 207 files. It will fail if there are any conflicts and leave markers in
208 208 the partially merged file."""
209 209 tool, toolpath, binary, symlink = toolconf
210 210 if symlink:
211 211 repo.ui.warn(_('warning: internal:merge cannot merge symlinks '
212 212 'for %s\n') % fcd.path())
213 213 return False, 1
214 214
215 215 r = _premerge(repo, toolconf, files)
216 216 if r:
217 217 a, b, c, back = files
218 218
219 219 ui = repo.ui
220 220
221 221 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
222 222 return True, r
223 223 return False, 0
224 224
225 225 @internaltool('dump', True)
226 226 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files):
227 227 """
228 228 Creates three versions of the files to merge, containing the
229 229 contents of local, other and base. These files can then be used to
230 230 perform a merge manually. If the file to be merged is named
231 231 ``a.txt``, these files will accordingly be named ``a.txt.local``,
232 232 ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
233 233 same directory as ``a.txt``."""
234 234 r = _premerge(repo, toolconf, files)
235 235 if r:
236 236 a, b, c, back = files
237 237
238 238 fd = fcd.path()
239 239
240 240 util.copyfile(a, a + ".local")
241 241 repo.wwrite(fd + ".other", fco.data(), fco.flags())
242 242 repo.wwrite(fd + ".base", fca.data(), fca.flags())
243 243 return False, r
244 244
245 245 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
246 246 r = _premerge(repo, toolconf, files)
247 247 if r:
248 248 tool, toolpath, binary, symlink = toolconf
249 249 a, b, c, back = files
250 250 out = ""
251 251 env = dict(HG_FILE=fcd.path(),
252 252 HG_MY_NODE=short(mynode),
253 253 HG_OTHER_NODE=str(fco.changectx()),
254 254 HG_BASE_NODE=str(fca.changectx()),
255 255 HG_MY_ISLINK='l' in fcd.flags(),
256 256 HG_OTHER_ISLINK='l' in fco.flags(),
257 257 HG_BASE_ISLINK='l' in fca.flags())
258 258
259 259 ui = repo.ui
260 260
261 261 args = _toolstr(ui, tool, "args", '$local $base $other')
262 262 if "$output" in args:
263 263 out, a = a, back # read input from backup, write to original
264 264 replace = dict(local=a, base=b, other=c, output=out)
265 265 args = util.interpolate(r'\$', replace, args,
266 266 lambda s: util.shellquote(util.localpath(s)))
267 267 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
268 268 out=ui.fout)
269 269 return True, r
270 270 return False, 0
271 271
272 272 def filemerge(repo, mynode, orig, fcd, fco, fca):
273 273 """perform a 3-way merge in the working directory
274 274
275 275 mynode = parent node before merge
276 276 orig = original local filename before merge
277 277 fco = other file context
278 278 fca = ancestor file context
279 279 fcd = local file context for current/destination file
280 280 """
281 281
282 282 def temp(prefix, ctx):
283 283 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
284 284 (fd, name) = tempfile.mkstemp(prefix=pre)
285 285 data = repo.wwritedata(ctx.path(), ctx.data())
286 286 f = os.fdopen(fd, "wb")
287 287 f.write(data)
288 288 f.close()
289 289 return name
290 290
291 291 if not fco.cmp(fcd): # files identical?
292 292 return None
293 293
294 294 ui = repo.ui
295 295 fd = fcd.path()
296 296 binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
297 297 symlink = 'l' in fcd.flags() + fco.flags()
298 298 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
299 299 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
300 300 (tool, fd, binary, symlink))
301 301
302 302 if tool in internals:
303 303 func = internals[tool]
304 304 trymerge = func.trymerge
305 305 onfailure = func.onfailure
306 306 else:
307 307 func = _xmerge
308 308 trymerge = True
309 309 onfailure = _("merging %s failed!\n")
310 310
311 311 toolconf = tool, toolpath, binary, symlink
312 312
313 313 if not trymerge:
314 314 return func(repo, mynode, orig, fcd, fco, fca, toolconf)
315 315
316 316 a = repo.wjoin(fd)
317 317 b = temp("base", fca)
318 318 c = temp("other", fco)
319 319 back = a + ".orig"
320 320 util.copyfile(a, back)
321 321
322 322 if orig != fco.path():
323 323 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
324 324 else:
325 325 ui.status(_("merging %s\n") % fd)
326 326
327 327 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
328 328
329 329 needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
330 330 (a, b, c, back))
331 331 if not needcheck:
332 332 if r:
333 333 if onfailure:
334 334 ui.warn(onfailure % fd)
335 335 else:
336 336 os.unlink(back)
337 337
338 338 os.unlink(b)
339 339 os.unlink(c)
340 340 return r
341 341
342 342 if not r and (_toolbool(ui, tool, "checkconflicts") or
343 343 'conflicts' in _toollist(ui, tool, "check")):
344 344 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
345 345 re.MULTILINE):
346 346 r = 1
347 347
348 348 checked = False
349 349 if 'prompt' in _toollist(ui, tool, "check"):
350 350 checked = True
351 if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
352 (_("&Yes"), _("&No")), 1):
351 if ui.promptchoice(_("was merge of '%s' successful (yn)?"
352 "$$ &Yes $$ &No") % fd, 1):
353 353 r = 1
354 354
355 355 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
356 356 'changed' in _toollist(ui, tool, "check")):
357 357 if filecmp.cmp(a, back):
358 358 if ui.promptchoice(_(" output file %s appears unchanged\n"
359 "was merge successful (yn)?") % fd,
360 (_("&Yes"), _("&No")), 1):
359 "was merge successful (yn)?"
360 "$$ &Yes $$ &No") % fd, 1):
361 361 r = 1
362 362
363 363 if _toolbool(ui, tool, "fixeol"):
364 364 _matcheol(a, back)
365 365
366 366 if r:
367 367 if onfailure:
368 368 ui.warn(onfailure % fd)
369 369 else:
370 370 os.unlink(back)
371 371
372 372 os.unlink(b)
373 373 os.unlink(c)
374 374 return r
375 375
376 376 # tell hggettext to extract docstrings from these functions:
377 377 i18nfunctions = internals.values()
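_picktool() and the _toolstr()/_toolbool() helpers above read per-tool keys
from the merge-tools section, and _xmerge() interpolates $local, $base, $other
and $output into the configured args. An illustrative hgrc fragment (the tool
name and path are placeholders; only the key names come from the code above):

    [merge-tools]
    mymerge.executable = /usr/local/bin/mymerge
    mymerge.args = $base $local $other -o $output
    mymerge.priority = 1
    mymerge.premerge = True
    mymerge.gui = False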
@@ -1,761 +1,761 @@
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, nullrev, hex, bin
9 9 from i18n import _
10 10 from mercurial import obsolete
11 11 import error, util, filemerge, copies, subrepo, worker, dicthelpers
12 12 import errno, os, shutil
13 13
14 14 class mergestate(object):
15 15 '''track 3-way merge state of individual files'''
16 16 def __init__(self, repo):
17 17 self._repo = repo
18 18 self._dirty = False
19 19 self._read()
20 20 def reset(self, node=None):
21 21 self._state = {}
22 22 if node:
23 23 self._local = node
24 24 shutil.rmtree(self._repo.join("merge"), True)
25 25 self._dirty = False
26 26 def _read(self):
27 27 self._state = {}
28 28 try:
29 29 f = self._repo.opener("merge/state")
30 30 for i, l in enumerate(f):
31 31 if i == 0:
32 32 self._local = bin(l[:-1])
33 33 else:
34 34 bits = l[:-1].split("\0")
35 35 self._state[bits[0]] = bits[1:]
36 36 f.close()
37 37 except IOError, err:
38 38 if err.errno != errno.ENOENT:
39 39 raise
40 40 self._dirty = False
41 41 def commit(self):
42 42 if self._dirty:
43 43 f = self._repo.opener("merge/state", "w")
44 44 f.write(hex(self._local) + "\n")
45 45 for d, v in self._state.iteritems():
46 46 f.write("\0".join([d] + v) + "\n")
47 47 f.close()
48 48 self._dirty = False
49 49 def add(self, fcl, fco, fca, fd):
50 50 hash = util.sha1(fcl.path()).hexdigest()
51 51 self._repo.opener.write("merge/" + hash, fcl.data())
52 52 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
53 53 hex(fca.filenode()), fco.path(), fcl.flags()]
54 54 self._dirty = True
55 55 def __contains__(self, dfile):
56 56 return dfile in self._state
57 57 def __getitem__(self, dfile):
58 58 return self._state[dfile][0]
59 59 def __iter__(self):
60 60 l = self._state.keys()
61 61 l.sort()
62 62 for f in l:
63 63 yield f
64 64 def mark(self, dfile, state):
65 65 self._state[dfile][0] = state
66 66 self._dirty = True
67 67 def resolve(self, dfile, wctx, octx):
68 68 if self[dfile] == 'r':
69 69 return 0
70 70 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
71 71 fcd = wctx[dfile]
72 72 fco = octx[ofile]
73 73 fca = self._repo.filectx(afile, fileid=anode)
74 74 # "premerge" x flags
75 75 flo = fco.flags()
76 76 fla = fca.flags()
77 77 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
78 78 if fca.node() == nullid:
79 79 self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
80 80 afile)
81 81 elif flags == fla:
82 82 flags = flo
83 83 # restore local
84 84 f = self._repo.opener("merge/" + hash)
85 85 self._repo.wwrite(dfile, f.read(), flags)
86 86 f.close()
87 87 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
88 88 if r is None:
89 89 # no real conflict
90 90 del self._state[dfile]
91 91 elif not r:
92 92 self.mark(dfile, 'r')
93 93 return r
94 94
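# Read together, _read() and commit() above imply a simple line-oriented layout
# for .hg/merge/state (a reconstruction for orientation, not a specification):
# the first line holds the hex of the local parent node, and each following
# line is one NUL-separated record per merged file, roughly
#
#   dfile \0 state ('u' or 'r') \0 sha1 of local path \0 local path \0
#   ancestor path \0 ancestor filenode hex \0 other path \0 flags
#
# where the sha1 also names the pristine local copy saved under .hg/merge/.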
95 95 def _checkunknownfile(repo, wctx, mctx, f):
96 96 return (not repo.dirstate._ignore(f)
97 97 and os.path.isfile(repo.wjoin(f))
98 98 and repo.wopener.audit.check(f)
99 99 and repo.dirstate.normalize(f) not in repo.dirstate
100 100 and mctx[f].cmp(wctx[f]))
101 101
102 102 def _checkunknown(repo, wctx, mctx):
103 103 "check for collisions between unknown files and files in mctx"
104 104
105 105 error = False
106 106 for f in mctx:
107 107 if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
108 108 error = True
109 109 wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
110 110 if error:
111 111 raise util.Abort(_("untracked files in working directory differ "
112 112 "from files in requested revision"))
113 113
114 114 def _forgetremoved(wctx, mctx, branchmerge):
115 115 """
116 116 Forget removed files
117 117
118 118 If we're jumping between revisions (as opposed to merging), and if
119 119 neither the working directory nor the target rev has the file,
120 120 then we need to remove it from the dirstate, to prevent the
121 121 dirstate from listing the file when it is no longer in the
122 122 manifest.
123 123
124 124 If we're merging, and the other revision has removed a file
125 125 that is not present in the working directory, we need to mark it
126 126 as removed.
127 127 """
128 128
129 129 actions = []
130 130 state = branchmerge and 'r' or 'f'
131 131 for f in wctx.deleted():
132 132 if f not in mctx:
133 133 actions.append((f, state, None, "forget deleted"))
134 134
135 135 if not branchmerge:
136 136 for f in wctx.removed():
137 137 if f not in mctx:
138 138 actions.append((f, "f", None, "forget removed"))
139 139
140 140 return actions
141 141
142 142 def _checkcollision(repo, wmf, actions, prompts):
143 143 # build provisional merged manifest up
144 144 pmmf = set(wmf)
145 145
146 146 def addop(f, args):
147 147 pmmf.add(f)
148 148 def removeop(f, args):
149 149 pmmf.discard(f)
150 150 def nop(f, args):
151 151 pass
152 152
153 153 def renameop(f, args):
154 154 f2, fd, flags = args
155 155 if f:
156 156 pmmf.discard(f)
157 157 pmmf.add(fd)
158 158 def mergeop(f, args):
159 159 f2, fd, move = args
160 160 if move:
161 161 pmmf.discard(f)
162 162 pmmf.add(fd)
163 163
164 164 opmap = {
165 165 "a": addop,
166 166 "d": renameop,
167 167 "dr": nop,
168 168 "e": nop,
169 169 "f": addop, # untracked file should be kept in working directory
170 170 "g": addop,
171 171 "m": mergeop,
172 172 "r": removeop,
173 173 "rd": nop,
174 174 }
175 175 for f, m, args, msg in actions:
176 176 op = opmap.get(m)
177 177 assert op, m
178 178 op(f, args)
179 179
180 180 opmap = {
181 181 "cd": addop,
182 182 "dc": addop,
183 183 }
184 184 for f, m in prompts:
185 185 op = opmap.get(m)
186 186 assert op, m
187 187 op(f, None)
188 188
189 189 # check case-folding collision in provisional merged manifest
190 190 foldmap = {}
191 191 for f in sorted(pmmf):
192 192 fold = util.normcase(f)
193 193 if fold in foldmap:
194 194 raise util.Abort(_("case-folding collision between %s and %s")
195 195 % (f, foldmap[fold]))
196 196 foldmap[fold] = f
197 197
198 198 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
199 199 acceptremote=False):
200 200 """
201 201 Merge p1 and p2 with ancestor pa and generate merge action list
202 202
203 203 branchmerge and force are as passed in to update
204 204 partial = function to filter file lists
205 205 acceptremote = accept the incoming changes without prompting
206 206 """
207 207
208 208 overwrite = force and not branchmerge
209 209 actions, copy, movewithdir = [], {}, {}
210 210
211 211 followcopies = False
212 212 if overwrite:
213 213 pa = wctx
214 214 elif pa == p2: # backwards
215 215 pa = wctx.p1()
216 216 elif not branchmerge and not wctx.dirty(missing=True):
217 217 pass
218 218 elif pa and repo.ui.configbool("merge", "followcopies", True):
219 219 followcopies = True
220 220
221 221 # manifests fetched in order are going to be faster, so prime the caches
222 222 [x.manifest() for x in
223 223 sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
224 224
225 225 if followcopies:
226 226 ret = copies.mergecopies(repo, wctx, p2, pa)
227 227 copy, movewithdir, diverge, renamedelete = ret
228 228 for of, fl in diverge.iteritems():
229 229 actions.append((of, "dr", (fl,), "divergent renames"))
230 230 for of, fl in renamedelete.iteritems():
231 231 actions.append((of, "rd", (fl,), "rename and delete"))
232 232
233 233 repo.ui.note(_("resolving manifests\n"))
234 234 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
235 235 % (bool(branchmerge), bool(force), bool(partial)))
236 236 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
237 237
238 238 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
239 239 copied = set(copy.values())
240 240 copied.update(movewithdir.values())
241 241
242 242 if '.hgsubstate' in m1:
243 243 # check whether sub state is modified
244 244 for s in sorted(wctx.substate):
245 245 if wctx.sub(s).dirty():
246 246 m1['.hgsubstate'] += "+"
247 247 break
248 248
249 249 aborts, prompts = [], []
250 250 # Compare manifests
251 251 fdiff = dicthelpers.diff(m1, m2)
252 252 flagsdiff = m1.flagsdiff(m2)
253 253 diff12 = dicthelpers.join(fdiff, flagsdiff)
254 254
255 255 for f, (n12, fl12) in diff12.iteritems():
256 256 if n12:
257 257 n1, n2 = n12
258 258 else: # file contents didn't change, but flags did
259 259 n1 = n2 = m1.get(f, None)
260 260 if n1 is None:
261 261 # Since n1 == n2, the file isn't present in m2 either. This
262 262 # means that the file was removed or deleted locally and
263 263 # removed remotely, but that residual entries remain in flags.
264 264 # This can happen in manifests generated by workingctx.
265 265 continue
266 266 if fl12:
267 267 fl1, fl2 = fl12
268 268 else: # flags didn't change, file contents did
269 269 fl1 = fl2 = m1.flags(f)
270 270
271 271 if partial and not partial(f):
272 272 continue
273 273 if n1 and n2:
274 274 fla = ma.flags(f)
275 275 nol = 'l' not in fl1 + fl2 + fla
276 276 a = ma.get(f, nullid)
277 277 if n2 == a and fl2 == fla:
278 278 pass # remote unchanged - keep local
279 279 elif n1 == a and fl1 == fla: # local unchanged - use remote
280 280 if n1 == n2: # optimization: keep local content
281 281 actions.append((f, "e", (fl2,), "update permissions"))
282 282 else:
283 283 actions.append((f, "g", (fl2,), "remote is newer"))
284 284 elif nol and n2 == a: # remote only changed 'x'
285 285 actions.append((f, "e", (fl2,), "update permissions"))
286 286 elif nol and n1 == a: # local only changed 'x'
287 287 actions.append((f, "g", (fl1,), "remote is newer"))
288 288 else: # both changed something
289 289 actions.append((f, "m", (f, f, False), "versions differ"))
290 290 elif f in copied: # files we'll deal with on m2 side
291 291 pass
292 292 elif n1 and f in movewithdir: # directory rename
293 293 f2 = movewithdir[f]
294 294 actions.append((f, "d", (None, f2, fl1),
295 295 "remote renamed directory to " + f2))
296 296 elif n1 and f in copy:
297 297 f2 = copy[f]
298 298 actions.append((f, "m", (f2, f, False),
299 299 "local copied/moved to " + f2))
300 300 elif n1 and f in ma: # clean, a different, no remote
301 301 if n1 != ma[f]:
302 302 prompts.append((f, "cd")) # prompt changed/deleted
303 303 elif n1[20:] == "a": # added, no remote
304 304 actions.append((f, "f", None, "remote deleted"))
305 305 else:
306 306 actions.append((f, "r", None, "other deleted"))
307 307 elif n2 and f in movewithdir:
308 308 f2 = movewithdir[f]
309 309 actions.append((None, "d", (f, f2, fl2),
310 310 "local renamed directory to " + f2))
311 311 elif n2 and f in copy:
312 312 f2 = copy[f]
313 313 if f2 in m2:
314 314 actions.append((f2, "m", (f, f, False),
315 315 "remote copied to " + f))
316 316 else:
317 317 actions.append((f2, "m", (f, f, True),
318 318 "remote moved to " + f))
319 319 elif n2 and f not in ma:
320 320 # local unknown, remote created: the logic is described by the
321 321 # following table:
322 322 #
323 323 # force branchmerge different | action
324 324 # n * n | get
325 325 # n * y | abort
326 326 # y n * | get
327 327 # y y n | get
328 328 # y y y | merge
329 329 #
330 330 # Checking whether the files are different is expensive, so we
331 331 # don't do that when we can avoid it.
332 332 if force and not branchmerge:
333 333 actions.append((f, "g", (fl2,), "remote created"))
334 334 else:
335 335 different = _checkunknownfile(repo, wctx, p2, f)
336 336 if force and branchmerge and different:
337 337 actions.append((f, "m", (f, f, False),
338 338 "remote differs from untracked local"))
339 339 elif not force and different:
340 340 aborts.append((f, "ud"))
341 341 else:
342 342 actions.append((f, "g", (fl2,), "remote created"))
343 343 elif n2 and n2 != ma[f]:
344 344 prompts.append((f, "dc")) # prompt deleted/changed
345 345
346 346 for f, m in sorted(aborts):
347 347 if m == "ud":
348 348 repo.ui.warn(_("%s: untracked file differs\n") % f)
349 349 else: assert False, m
350 350 if aborts:
351 351 raise util.Abort(_("untracked files in working directory differ "
352 352 "from files in requested revision"))
353 353
354 354 if not util.checkcase(repo.path):
355 355 # check collision between files only in p2 for clean update
356 356 if (not branchmerge and
357 357 (force or not wctx.dirty(missing=True, branch=False))):
358 358 _checkcollision(repo, m2, [], [])
359 359 else:
360 360 _checkcollision(repo, m1, actions, prompts)
361 361
362 362 for f, m in sorted(prompts):
363 363 if m == "cd":
364 364 if acceptremote:
365 365 actions.append((f, "r", None, "remote delete"))
366 366 elif repo.ui.promptchoice(
367 367 _("local changed %s which remote deleted\n"
368 "use (c)hanged version or (d)elete?") % f,
369 (_("&Changed"), _("&Delete")), 0):
368 "use (c)hanged version or (d)elete?"
369 "$$ &Changed $$ &Delete") % f, 0):
370 370 actions.append((f, "r", None, "prompt delete"))
371 371 else:
372 372 actions.append((f, "a", None, "prompt keep"))
373 373 elif m == "dc":
374 374 if acceptremote:
375 375 actions.append((f, "g", (m2.flags(f),), "remote recreating"))
376 376 elif repo.ui.promptchoice(
377 377 _("remote changed %s which local deleted\n"
378 "use (c)hanged version or leave (d)eleted?") % f,
379 (_("&Changed"), _("&Deleted")), 0) == 0:
378 "use (c)hanged version or leave (d)eleted?"
379 "$$ &Changed $$ &Deleted") % f, 0) == 0:
380 380 actions.append((f, "g", (m2.flags(f),), "prompt recreating"))
381 381 else: assert False, m
382 382 return actions
383 383
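
The two prompt calls above use the single-string form that this changeset introduces: the answer labels follow the question after "$$" separators (with "&" marking each response key), and only the default index is passed as a separate argument. A sketch of the call shape, assuming repo, actions and _ are in scope as in manifestmerge (the file name is invented):

# Sketch of the new ui.promptchoice call shape (file name invented).
f = 'data/report.csv'
if repo.ui.promptchoice(
    _("local changed %s which remote deleted\n"
      "use (c)hanged version or (d)elete?"
      "$$ &Changed $$ &Delete") % f, 0):      # returns the chosen index
    actions.append((f, "r", None, "prompt delete"))    # 1 -> &Delete
else:
    actions.append((f, "a", None, "prompt keep"))      # 0 -> &Changed (default)
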
384 384 def actionkey(a):
385 385 return a[1] == "r" and -1 or 0, a
386 386
387 387 def getremove(repo, mctx, overwrite, args):
388 388 """apply usually-non-interactive updates to the working directory
389 389
390 390 mctx is the context to be merged into the working copy
391 391
392 392 yields tuples for progress updates
393 393 """
394 394 verbose = repo.ui.verbose
395 395 unlink = util.unlinkpath
396 396 wjoin = repo.wjoin
397 397 fctx = mctx.filectx
398 398 wwrite = repo.wwrite
399 399 audit = repo.wopener.audit
400 400 i = 0
401 401 for arg in args:
402 402 f = arg[0]
403 403 if arg[1] == 'r':
404 404 if verbose:
405 405 repo.ui.note(_("removing %s\n") % f)
406 406 audit(f)
407 407 try:
408 408 unlink(wjoin(f), ignoremissing=True)
409 409 except OSError, inst:
410 410 repo.ui.warn(_("update failed to remove %s: %s!\n") %
411 411 (f, inst.strerror))
412 412 else:
413 413 if verbose:
414 414 repo.ui.note(_("getting %s\n") % f)
415 415 wwrite(f, fctx(f).data(), arg[2][0])
416 416 if i == 100:
417 417 yield i, f
418 418 i = 0
419 419 i += 1
420 420 if i > 0:
421 421 yield i, f
422 422
423 423 def applyupdates(repo, actions, wctx, mctx, actx, overwrite):
424 424 """apply the merge action list to the working directory
425 425
426 426 wctx is the working copy context
427 427 mctx is the context to be merged into the working copy
428 428 actx is the context of the common ancestor
429 429
430 430 Return a tuple of counts (updated, merged, removed, unresolved) that
431 431 describes how many files were affected by the update.
432 432 """
433 433
434 434 updated, merged, removed, unresolved = 0, 0, 0, 0
435 435 ms = mergestate(repo)
436 436 ms.reset(wctx.p1().node())
437 437 moves = []
438 438 actions.sort(key=actionkey)
439 439
440 440 # prescan for merges
441 441 for a in actions:
442 442 f, m, args, msg = a
443 443 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
444 444 if m == "m": # merge
445 445 f2, fd, move = args
446 446 if fd == '.hgsubstate': # merged internally
447 447 continue
448 448 repo.ui.debug(" preserving %s for resolve of %s\n" % (f, fd))
449 449 fcl = wctx[f]
450 450 fco = mctx[f2]
451 451 if mctx == actx: # backwards, use working dir parent as ancestor
452 452 if fcl.parents():
453 453 fca = fcl.p1()
454 454 else:
455 455 fca = repo.filectx(f, fileid=nullrev)
456 456 else:
457 457 fca = fcl.ancestor(fco, actx)
458 458 if not fca:
459 459 fca = repo.filectx(f, fileid=nullrev)
460 460 ms.add(fcl, fco, fca, fd)
461 461 if f != fd and move:
462 462 moves.append(f)
463 463
464 464 audit = repo.wopener.audit
465 465
466 466     # remove renamed files after they have been safely stored
467 467 for f in moves:
468 468 if os.path.lexists(repo.wjoin(f)):
469 469 repo.ui.debug("removing %s\n" % f)
470 470 audit(f)
471 471 util.unlinkpath(repo.wjoin(f))
472 472
473 473 numupdates = len(actions)
474 474 workeractions = [a for a in actions if a[1] in 'gr']
475 475 updateactions = [a for a in workeractions if a[1] == 'g']
476 476 updated = len(updateactions)
477 477 removeactions = [a for a in workeractions if a[1] == 'r']
478 478 removed = len(removeactions)
479 479 actions = [a for a in actions if a[1] not in 'gr']
480 480
481 481 hgsub = [a[1] for a in workeractions if a[0] == '.hgsubstate']
482 482 if hgsub and hgsub[0] == 'r':
483 483 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
484 484
485 485 z = 0
486 486 prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
487 487 removeactions)
488 488 for i, item in prog:
489 489 z += i
490 490 repo.ui.progress(_('updating'), z, item=item, total=numupdates,
491 491 unit=_('files'))
492 492 prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
493 493 updateactions)
494 494 for i, item in prog:
495 495 z += i
496 496 repo.ui.progress(_('updating'), z, item=item, total=numupdates,
497 497 unit=_('files'))
498 498
499 499 if hgsub and hgsub[0] == 'g':
500 500 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
501 501
502 502 _updating = _('updating')
503 503 _files = _('files')
504 504 progress = repo.ui.progress
505 505
506 506 for i, a in enumerate(actions):
507 507 f, m, args, msg = a
508 508 progress(_updating, z + i + 1, item=f, total=numupdates, unit=_files)
509 509 if m == "m": # merge
510 510 f2, fd, move = args
511 511 if fd == '.hgsubstate': # subrepo states need updating
512 512 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
513 513 overwrite)
514 514 continue
515 515 audit(fd)
516 516 r = ms.resolve(fd, wctx, mctx)
517 517 if r is not None and r > 0:
518 518 unresolved += 1
519 519 else:
520 520 if r is None:
521 521 updated += 1
522 522 else:
523 523 merged += 1
524 524 elif m == "d": # directory rename
525 525 f2, fd, flags = args
526 526 if f:
527 527 repo.ui.note(_("moving %s to %s\n") % (f, fd))
528 528 audit(f)
529 529 repo.wwrite(fd, wctx.filectx(f).data(), flags)
530 530 util.unlinkpath(repo.wjoin(f))
531 531 if f2:
532 532 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
533 533 repo.wwrite(fd, mctx.filectx(f2).data(), flags)
534 534 updated += 1
535 535 elif m == "dr": # divergent renames
536 536 fl, = args
537 537 repo.ui.warn(_("note: possible conflict - %s was renamed "
538 538 "multiple times to:\n") % f)
539 539 for nf in fl:
540 540 repo.ui.warn(" %s\n" % nf)
541 541 elif m == "rd": # rename and delete
542 542 fl, = args
543 543 repo.ui.warn(_("note: possible conflict - %s was deleted "
544 544 "and renamed to:\n") % f)
545 545 for nf in fl:
546 546 repo.ui.warn(" %s\n" % nf)
547 547 elif m == "e": # exec
548 548 flags, = args
549 549 audit(f)
550 550 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
551 551 updated += 1
552 552 ms.commit()
553 553 progress(_updating, None, total=numupdates, unit=_files)
554 554
555 555 return updated, merged, removed, unresolved
556 556
557 557 def calculateupdates(repo, tctx, mctx, ancestor, branchmerge, force, partial,
558 558 acceptremote=False):
559 559 "Calculate the actions needed to merge mctx into tctx"
560 560 actions = []
561 561 actions += manifestmerge(repo, tctx, mctx,
562 562 ancestor,
563 563 branchmerge, force,
564 564 partial, acceptremote)
565 565 if tctx.rev() is None:
566 566 actions += _forgetremoved(tctx, mctx, branchmerge)
567 567 return actions
568 568
569 569 def recordupdates(repo, actions, branchmerge):
570 570 "record merge actions to the dirstate"
571 571
572 572 for a in actions:
573 573 f, m, args, msg = a
574 574 if m == "r": # remove
575 575 if branchmerge:
576 576 repo.dirstate.remove(f)
577 577 else:
578 578 repo.dirstate.drop(f)
579 579 elif m == "a": # re-add
580 580 if not branchmerge:
581 581 repo.dirstate.add(f)
582 582 elif m == "f": # forget
583 583 repo.dirstate.drop(f)
584 584 elif m == "e": # exec change
585 585 repo.dirstate.normallookup(f)
586 586 elif m == "g": # get
587 587 if branchmerge:
588 588 repo.dirstate.otherparent(f)
589 589 else:
590 590 repo.dirstate.normal(f)
591 591 elif m == "m": # merge
592 592 f2, fd, move = args
593 593 if branchmerge:
594 594 # We've done a branch merge, mark this file as merged
595 595 # so that we properly record the merger later
596 596 repo.dirstate.merge(fd)
597 597 if f != f2: # copy/rename
598 598 if move:
599 599 repo.dirstate.remove(f)
600 600 if f != fd:
601 601 repo.dirstate.copy(f, fd)
602 602 else:
603 603 repo.dirstate.copy(f2, fd)
604 604 else:
605 605 # We've update-merged a locally modified file, so
606 606 # we set the dirstate to emulate a normal checkout
607 607 # of that file some time in the past. Thus our
608 608 # merge will appear as a normal local file
609 609 # modification.
610 610 if f2 == fd: # file not locally copied/moved
611 611 repo.dirstate.normallookup(fd)
612 612 if move:
613 613 repo.dirstate.drop(f)
614 614 elif m == "d": # directory rename
615 615 f2, fd, flag = args
616 616 if not f2 and f not in repo.dirstate:
617 617 # untracked file moved
618 618 continue
619 619 if branchmerge:
620 620 repo.dirstate.add(fd)
621 621 if f:
622 622 repo.dirstate.remove(f)
623 623 repo.dirstate.copy(f, fd)
624 624 if f2:
625 625 repo.dirstate.copy(f2, fd)
626 626 else:
627 627 repo.dirstate.normal(fd)
628 628 if f:
629 629 repo.dirstate.drop(f)
630 630
631 631 def update(repo, node, branchmerge, force, partial, ancestor=None,
632 632 mergeancestor=False):
633 633 """
634 634 Perform a merge between the working directory and the given node
635 635
636 636 node = the node to update to, or None if unspecified
637 637 branchmerge = whether to merge between branches
638 638 force = whether to force branch merging or file overwriting
639 639 partial = a function to filter file lists (dirstate not updated)
640 640 mergeancestor = whether it is merging with an ancestor. If true,
641 641 we should accept the incoming changes for any prompts that occur.
642 642 If false, merging with an ancestor (fast-forward) is only allowed
643 643     between different named branches. This flag is used by the rebase extension
644 644 as a temporary fix and should be avoided in general.
645 645
646 646 The table below shows all the behaviors of the update command
647 647 given the -c and -C or no options, whether the working directory
648 648 is dirty, whether a revision is specified, and the relationship of
649 649 the parent rev to the target rev (linear, on the same named
650 650 branch, or on another named branch).
651 651
652 652 This logic is tested by test-update-branches.t.
653 653
654 654 -c -C dirty rev | linear same cross
655 655 n n n n | ok (1) x
656 656 n n n y | ok ok ok
657 657 n n y * | merge (2) (2)
658 658 n y * * | --- discard ---
659 659 y n y * | --- (3) ---
660 660 y n n * | --- ok ---
661 661 y y * * | --- (4) ---
662 662
663 663 x = can't happen
664 664 * = don't-care
665 665 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
666 666 2 = abort: crosses branches (use 'hg merge' to merge or
667 667 use 'hg update -C' to discard changes)
668 668 3 = abort: uncommitted local changes
669 669 4 = incompatible options (checked in commands.py)
670 670
671 671 Return the same tuple as applyupdates().
672 672 """
673 673
674 674 onode = node
675 675 wlock = repo.wlock()
676 676 try:
677 677 wc = repo[None]
678 678 if node is None:
679 679 # tip of current branch
680 680 try:
681 681 node = repo.branchtip(wc.branch())
682 682 except error.RepoLookupError:
683 683 if wc.branch() == "default": # no default branch!
684 684 node = repo.lookup("tip") # update to tip
685 685 else:
686 686 raise util.Abort(_("branch %s not found") % wc.branch())
687 687 overwrite = force and not branchmerge
688 688 pl = wc.parents()
689 689 p1, p2 = pl[0], repo[node]
690 690 if ancestor:
691 691 pa = repo[ancestor]
692 692 else:
693 693 pa = p1.ancestor(p2)
694 694
695 695 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
696 696
697 697 ### check phase
698 698 if not overwrite and len(pl) > 1:
699 699 raise util.Abort(_("outstanding uncommitted merges"))
700 700 if branchmerge:
701 701 if pa == p2:
702 702 raise util.Abort(_("merging with a working directory ancestor"
703 703 " has no effect"))
704 704 elif pa == p1:
705 705 if not mergeancestor and p1.branch() == p2.branch():
706 706 raise util.Abort(_("nothing to merge"),
707 707 hint=_("use 'hg update' "
708 708 "or check 'hg heads'"))
709 709 if not force and (wc.files() or wc.deleted()):
710 710 raise util.Abort(_("outstanding uncommitted changes"),
711 711 hint=_("use 'hg status' to list changes"))
712 712 for s in sorted(wc.substate):
713 713 if wc.sub(s).dirty():
714 714 raise util.Abort(_("outstanding uncommitted changes in "
715 715 "subrepository '%s'") % s)
716 716
717 717 elif not overwrite:
718 718             if pa not in (p1, p2):  # not linear
719 719 dirty = wc.dirty(missing=True)
720 720 if dirty or onode is None:
721 721                     # Branching is a bit strange here, to ensure we make the
722 722                     # minimal number of calls to obsolete.background.
723 723 foreground = obsolete.foreground(repo, [p1.node()])
724 724 # note: the <node> variable contains a random identifier
725 725 if repo[node].node() in foreground:
726 726 pa = p1 # allow updating to successors
727 727 elif dirty:
728 728 msg = _("crosses branches (merge branches or use"
729 729 " --clean to discard changes)")
730 730 raise util.Abort(msg)
731 731 else: # node is none
732 732 msg = _("crosses branches (merge branches or update"
733 733 " --check to force update)")
734 734 raise util.Abort(msg)
735 735 else:
736 736 # Allow jumping branches if clean and specific rev given
737 737 pa = p1
738 738
739 739 ### calculate phase
740 740 actions = calculateupdates(repo, wc, p2, pa,
741 741 branchmerge, force, partial, mergeancestor)
742 742
743 743 ### apply phase
744 744 if not branchmerge: # just jump to the new rev
745 745 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
746 746 if not partial:
747 747 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
748 748
749 749 stats = applyupdates(repo, actions, wc, p2, pa, overwrite)
750 750
751 751 if not partial:
752 752 repo.setparents(fp1, fp2)
753 753 recordupdates(repo, actions, branchmerge)
754 754 if not branchmerge:
755 755 repo.dirstate.setbranch(p2.branch())
756 756 finally:
757 757 wlock.release()
758 758
759 759 if not partial:
760 760 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
761 761 return stats
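
As a rough guide to the docstring table above, this is how the common commands end up calling update(); the repo and node values are placeholders and the flag mapping is only a sketch, not taken from this diff:

# Hedged sketch of the parameter mapping (placeholders only):
#   'hg update REV'     -> branchmerge=False, force=False
#   'hg update -C REV'  -> branchmerge=False, force=True   (discard changes)
#   'hg merge REV'      -> branchmerge=True,  force=False
#   'hg merge -f REV'   -> branchmerge=True,  force=True
stats = update(repo, node, branchmerge=False, force=False, partial=None)
updated, merged, removed, unresolved = stats   # same tuple as applyupdates()
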
@@ -1,1451 +1,1452 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import errno, os, re, xml.dom.minidom, shutil, posixpath, sys
9 9 import stat, subprocess, tarfile
10 10 from i18n import _
11 11 import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
12 12 hg = None
13 13 propertycache = util.propertycache
14 14
15 15 nullstate = ('', '', 'empty')
16 16
17 17 def _expandedabspath(path):
18 18 '''
19 19     get a path or url; if it is a path, expand it and return an absolute path
20 20 '''
21 21 expandedpath = util.urllocalpath(util.expandpath(path))
22 22 u = util.url(expandedpath)
23 23 if not u.scheme:
24 24 path = util.normpath(os.path.abspath(u.path))
25 25 return path
26 26
27 27 def _getstorehashcachename(remotepath):
28 28 '''get a unique filename for the store hash cache of a remote repository'''
29 29 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
30 30
31 31 def _calcfilehash(filename):
32 32 data = ''
33 33 if os.path.exists(filename):
34 34 fd = open(filename, 'rb')
35 35 data = fd.read()
36 36 fd.close()
37 37 return util.sha1(data).hexdigest()
38 38
39 39 class SubrepoAbort(error.Abort):
40 40 """Exception class used to avoid handling a subrepo error more than once"""
41 41 def __init__(self, *args, **kw):
42 42 error.Abort.__init__(self, *args, **kw)
43 43 self.subrepo = kw.get('subrepo')
44 44 self.cause = kw.get('cause')
45 45
46 46 def annotatesubrepoerror(func):
47 47 def decoratedmethod(self, *args, **kargs):
48 48 try:
49 49 res = func(self, *args, **kargs)
50 50 except SubrepoAbort, ex:
51 51 # This exception has already been handled
52 52 raise ex
53 53 except error.Abort, ex:
54 54 subrepo = subrelpath(self)
55 55 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
56 56 # avoid handling this exception by raising a SubrepoAbort exception
57 57 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
58 58 cause=sys.exc_info())
59 59 return res
60 60 return decoratedmethod
61 61
62 62 def state(ctx, ui):
63 63 """return a state dict, mapping subrepo paths configured in .hgsub
64 64     to a tuple: (source from .hgsub, revision from .hgsubstate, kind
65 65 (key in types dict))
66 66 """
67 67 p = config.config()
68 68 def read(f, sections=None, remap=None):
69 69 if f in ctx:
70 70 try:
71 71 data = ctx[f].data()
72 72 except IOError, err:
73 73 if err.errno != errno.ENOENT:
74 74 raise
75 75 # handle missing subrepo spec files as removed
76 76 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
77 77 return
78 78 p.parse(f, data, sections, remap, read)
79 79 else:
80 80 raise util.Abort(_("subrepo spec file %s not found") % f)
81 81
82 82 if '.hgsub' in ctx:
83 83 read('.hgsub')
84 84
85 85 for path, src in ui.configitems('subpaths'):
86 86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
87 87
88 88 rev = {}
89 89 if '.hgsubstate' in ctx:
90 90 try:
91 91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
92 92 l = l.lstrip()
93 93 if not l:
94 94 continue
95 95 try:
96 96 revision, path = l.split(" ", 1)
97 97 except ValueError:
98 98 raise util.Abort(_("invalid subrepository revision "
99 99 "specifier in .hgsubstate line %d")
100 100 % (i + 1))
101 101 rev[path] = revision
102 102 except IOError, err:
103 103 if err.errno != errno.ENOENT:
104 104 raise
105 105
106 106 def remap(src):
107 107 for pattern, repl in p.items('subpaths'):
108 108 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
109 109 # does a string decode.
110 110 repl = repl.encode('string-escape')
111 111 # However, we still want to allow back references to go
112 112 # through unharmed, so we turn r'\\1' into r'\1'. Again,
113 113 # extra escapes are needed because re.sub string decodes.
114 114 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
115 115 try:
116 116 src = re.sub(pattern, repl, src, 1)
117 117 except re.error, e:
118 118 raise util.Abort(_("bad subrepository pattern in %s: %s")
119 119 % (p.source('subpaths', pattern), e))
120 120 return src
121 121
122 122 state = {}
123 123 for path, src in p[''].items():
124 124 kind = 'hg'
125 125 if src.startswith('['):
126 126 if ']' not in src:
127 127 raise util.Abort(_('missing ] in subrepo source'))
128 128 kind, src = src.split(']', 1)
129 129 kind = kind[1:]
130 130 src = src.lstrip() # strip any extra whitespace after ']'
131 131
132 132 if not util.url(src).isabs():
133 133 parent = _abssource(ctx._repo, abort=False)
134 134 if parent:
135 135 parent = util.url(parent)
136 136 parent.path = posixpath.join(parent.path or '', src)
137 137 parent.path = posixpath.normpath(parent.path)
138 138 joined = str(parent)
139 139 # Remap the full joined path and use it if it changes,
140 140 # else remap the original source.
141 141 remapped = remap(joined)
142 142 if remapped == joined:
143 143 src = remap(src)
144 144 else:
145 145 src = remapped
146 146
147 147 src = remap(src)
148 148 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
149 149
150 150 return state
151 151
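
state() above reads two files from the parent repository; a small invented example of their contents and of the dict that would come back (paths, URLs and hashes are made up):

# .hgsub: "path = [kind]source"; kind defaults to 'hg' when the bracket
# prefix is absent.  .hgsubstate: "revision path", one subrepo per line.
hgsub = '''\
lib/backend     = https://example.com/hg/backend
vendor/pyflakes = [git]https://example.com/git/pyflakes.git
'''
hgsubstate = '''\
0123456789abcdef0123456789abcdef01234567 lib/backend
fedcba9876543210fedcba9876543210fedcba98 vendor/pyflakes
'''
# state(ctx, ui) would then map each path to (source, revision, kind), e.g.
# {'lib/backend': ('https://example.com/hg/backend',
#                  '0123456789abcdef0123456789abcdef01234567', 'hg'),
#  'vendor/pyflakes': ('https://example.com/git/pyflakes.git',
#                      'fedcba9876543210fedcba9876543210fedcba98', 'git')}
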
152 152 def writestate(repo, state):
153 153 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
154 154 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
155 155 repo.wwrite('.hgsubstate', ''.join(lines), '')
156 156
157 157 def submerge(repo, wctx, mctx, actx, overwrite):
158 158 """delegated from merge.applyupdates: merging of .hgsubstate file
159 159 in working context, merging context and ancestor context"""
160 160 if mctx == actx: # backwards?
161 161 actx = wctx.p1()
162 162 s1 = wctx.substate
163 163 s2 = mctx.substate
164 164 sa = actx.substate
165 165 sm = {}
166 166
167 167 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
168 168
169 169 def debug(s, msg, r=""):
170 170 if r:
171 171 r = "%s:%s:%s" % r
172 172 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
173 173
174 174 for s, l in sorted(s1.iteritems()):
175 175 a = sa.get(s, nullstate)
176 176 ld = l # local state with possible dirty flag for compares
177 177 if wctx.sub(s).dirty():
178 178 ld = (l[0], l[1] + "+")
179 179 if wctx == actx: # overwrite
180 180 a = ld
181 181
182 182 if s in s2:
183 183 r = s2[s]
184 184 if ld == r or r == a: # no change or local is newer
185 185 sm[s] = l
186 186 continue
187 187 elif ld == a: # other side changed
188 188 debug(s, "other changed, get", r)
189 189 wctx.sub(s).get(r, overwrite)
190 190 sm[s] = r
191 191 elif ld[0] != r[0]: # sources differ
192 192 if repo.ui.promptchoice(
193 193 _(' subrepository sources for %s differ\n'
194 'use (l)ocal source (%s) or (r)emote source (%s)?')
195 % (s, l[0], r[0]),
196 (_('&Local'), _('&Remote')), 0):
194 'use (l)ocal source (%s) or (r)emote source (%s)?'
195 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
197 196 debug(s, "prompt changed, get", r)
198 197 wctx.sub(s).get(r, overwrite)
199 198 sm[s] = r
200 199 elif ld[1] == a[1]: # local side is unchanged
201 200 debug(s, "other side changed, get", r)
202 201 wctx.sub(s).get(r, overwrite)
203 202 sm[s] = r
204 203 else:
205 204 debug(s, "both sides changed, merge with", r)
206 205 wctx.sub(s).merge(r)
207 206 sm[s] = l
208 207 elif ld == a: # remote removed, local unchanged
209 208 debug(s, "remote removed, remove")
210 209 wctx.sub(s).remove()
211 210 elif a == nullstate: # not present in remote or ancestor
212 211 debug(s, "local added, keep")
213 212 sm[s] = l
214 213 continue
215 214 else:
216 215 if repo.ui.promptchoice(
217 216 _(' local changed subrepository %s which remote removed\n'
218 'use (c)hanged version or (d)elete?') % s,
219 (_('&Changed'), _('&Delete')), 0):
217 'use (c)hanged version or (d)elete?'
218 '$$ &Changed $$ &Delete') % s, 0):
220 219 debug(s, "prompt remove")
221 220 wctx.sub(s).remove()
222 221
223 222 for s, r in sorted(s2.items()):
224 223 if s in s1:
225 224 continue
226 225 elif s not in sa:
227 226 debug(s, "remote added, get", r)
228 227 mctx.sub(s).get(r)
229 228 sm[s] = r
230 229 elif r != sa[s]:
231 230 if repo.ui.promptchoice(
232 231 _(' remote changed subrepository %s which local removed\n'
233 'use (c)hanged version or (d)elete?') % s,
234 (_('&Changed'), _('&Delete')), 0) == 0:
232 'use (c)hanged version or (d)elete?'
233 '$$ &Changed $$ &Delete') % s, 0) == 0:
235 234 debug(s, "prompt recreate", r)
236 235 wctx.sub(s).get(r)
237 236 sm[s] = r
238 237
239 238 # record merged .hgsubstate
240 239 writestate(repo, sm)
241 240
242 241 def _updateprompt(ui, sub, dirty, local, remote):
243 242 if dirty:
244 243 msg = (_(' subrepository sources for %s differ\n'
245 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
244 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
245 '$$ &Local $$ &Remote')
246 246 % (subrelpath(sub), local, remote))
247 247 else:
248 248 msg = (_(' subrepository sources for %s differ (in checked out '
249 249 'version)\n'
250 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
250 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
251 '$$ &Local $$ &Remote')
251 252 % (subrelpath(sub), local, remote))
252 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
253 return ui.promptchoice(msg, 0)
253 254
254 255 def reporelpath(repo):
255 256 """return path to this (sub)repo as seen from outermost repo"""
256 257 parent = repo
257 258 while util.safehasattr(parent, '_subparent'):
258 259 parent = parent._subparent
259 260 p = parent.root.rstrip(os.sep)
260 261 return repo.root[len(p) + 1:]
261 262
262 263 def subrelpath(sub):
263 264 """return path to this subrepo as seen from outermost repo"""
264 265 if util.safehasattr(sub, '_relpath'):
265 266 return sub._relpath
266 267 if not util.safehasattr(sub, '_repo'):
267 268 return sub._path
268 269 return reporelpath(sub._repo)
269 270
270 271 def _abssource(repo, push=False, abort=True):
271 272 """return pull/push path of repo - either based on parent repo .hgsub info
272 273 or on the top repo config. Abort or return None if no source found."""
273 274 if util.safehasattr(repo, '_subparent'):
274 275 source = util.url(repo._subsource)
275 276 if source.isabs():
276 277 return str(source)
277 278 source.path = posixpath.normpath(source.path)
278 279 parent = _abssource(repo._subparent, push, abort=False)
279 280 if parent:
280 281 parent = util.url(util.pconvert(parent))
281 282 parent.path = posixpath.join(parent.path or '', source.path)
282 283 parent.path = posixpath.normpath(parent.path)
283 284 return str(parent)
284 285 else: # recursion reached top repo
285 286 if util.safehasattr(repo, '_subtoppath'):
286 287 return repo._subtoppath
287 288 if push and repo.ui.config('paths', 'default-push'):
288 289 return repo.ui.config('paths', 'default-push')
289 290 if repo.ui.config('paths', 'default'):
290 291 return repo.ui.config('paths', 'default')
291 292 if repo.sharedpath != repo.path:
292 293 # chop off the .hg component to get the default path form
293 294 return os.path.dirname(repo.sharedpath)
294 295 if abort:
295 296 raise util.Abort(_("default path for subrepository not found"))
296 297
297 298 def itersubrepos(ctx1, ctx2):
298 299 """find subrepos in ctx1 or ctx2"""
299 300 # Create a (subpath, ctx) mapping where we prefer subpaths from
300 301 # ctx1. The subpaths from ctx2 are important when the .hgsub file
301 302 # has been modified (in ctx2) but not yet committed (in ctx1).
302 303 subpaths = dict.fromkeys(ctx2.substate, ctx2)
303 304 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
304 305 for subpath, ctx in sorted(subpaths.iteritems()):
305 306 yield subpath, ctx.sub(subpath)
306 307
307 308 def subrepo(ctx, path):
308 309 """return instance of the right subrepo class for subrepo in path"""
309 310 # subrepo inherently violates our import layering rules
310 311 # because it wants to make repo objects from deep inside the stack
311 312 # so we manually delay the circular imports to not break
312 313 # scripts that don't use our demand-loading
313 314 global hg
314 315 import hg as h
315 316 hg = h
316 317
317 318 scmutil.pathauditor(ctx._repo.root)(path)
318 319 state = ctx.substate[path]
319 320 if state[2] not in types:
320 321 raise util.Abort(_('unknown subrepo type %s') % state[2])
321 322 return types[state[2]](ctx, path, state[:2])
322 323
323 324 # subrepo classes need to implement the following abstract class:
324 325
325 326 class abstractsubrepo(object):
326 327
327 328 def storeclean(self, path):
328 329 """
329 330 returns true if the repository has not changed since it was last
330 331 cloned from or pushed to a given repository.
331 332 """
332 333 return False
333 334
334 335 def dirty(self, ignoreupdate=False):
335 336 """returns true if the dirstate of the subrepo is dirty or does not
336 337 match current stored state. If ignoreupdate is true, only check
337 338 whether the subrepo has uncommitted changes in its dirstate.
338 339 """
339 340 raise NotImplementedError
340 341
341 342 def basestate(self):
342 343 """current working directory base state, disregarding .hgsubstate
343 344 state and working directory modifications"""
344 345 raise NotImplementedError
345 346
346 347 def checknested(self, path):
347 348 """check if path is a subrepository within this repository"""
348 349 return False
349 350
350 351 def commit(self, text, user, date):
351 352 """commit the current changes to the subrepo with the given
352 353 log message. Use given user and date if possible. Return the
353 354 new state of the subrepo.
354 355 """
355 356 raise NotImplementedError
356 357
357 358 def remove(self):
358 359 """remove the subrepo
359 360
360 361 (should verify the dirstate is not dirty first)
361 362 """
362 363 raise NotImplementedError
363 364
364 365 def get(self, state, overwrite=False):
365 366 """run whatever commands are needed to put the subrepo into
366 367 this state
367 368 """
368 369 raise NotImplementedError
369 370
370 371 def merge(self, state):
371 372 """merge currently-saved state with the new state."""
372 373 raise NotImplementedError
373 374
374 375 def push(self, opts):
375 376 """perform whatever action is analogous to 'hg push'
376 377
377 378 This may be a no-op on some systems.
378 379 """
379 380 raise NotImplementedError
380 381
381 382 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
382 383 return []
383 384
384 385 def status(self, rev2, **opts):
385 386 return [], [], [], [], [], [], []
386 387
387 388 def diff(self, ui, diffopts, node2, match, prefix, **opts):
388 389 pass
389 390
390 391 def outgoing(self, ui, dest, opts):
391 392 return 1
392 393
393 394 def incoming(self, ui, source, opts):
394 395 return 1
395 396
396 397 def files(self):
397 398 """return filename iterator"""
398 399 raise NotImplementedError
399 400
400 401 def filedata(self, name):
401 402 """return file data"""
402 403 raise NotImplementedError
403 404
404 405 def fileflags(self, name):
405 406 """return file flags"""
406 407 return ''
407 408
408 409 def archive(self, ui, archiver, prefix, match=None):
409 410 if match is not None:
410 411 files = [f for f in self.files() if match(f)]
411 412 else:
412 413 files = self.files()
413 414 total = len(files)
414 415 relpath = subrelpath(self)
415 416 ui.progress(_('archiving (%s)') % relpath, 0,
416 417 unit=_('files'), total=total)
417 418 for i, name in enumerate(files):
418 419 flags = self.fileflags(name)
419 420 mode = 'x' in flags and 0755 or 0644
420 421 symlink = 'l' in flags
421 422 archiver.addfile(os.path.join(prefix, self._path, name),
422 423 mode, symlink, self.filedata(name))
423 424 ui.progress(_('archiving (%s)') % relpath, i + 1,
424 425 unit=_('files'), total=total)
425 426 ui.progress(_('archiving (%s)') % relpath, None)
426 427 return total
427 428
428 429 def walk(self, match):
429 430 '''
430 431 walk recursively through the directory tree, finding all files
431 432 matched by the match function
432 433 '''
433 434 pass
434 435
435 436 def forget(self, ui, match, prefix):
436 437 return ([], [])
437 438
438 439 def revert(self, ui, substate, *pats, **opts):
439 440 ui.warn('%s: reverting %s subrepos is unsupported\n' \
440 441 % (substate[0], substate[2]))
441 442 return []
442 443
443 444 class hgsubrepo(abstractsubrepo):
444 445 def __init__(self, ctx, path, state):
445 446 self._path = path
446 447 self._state = state
447 448 r = ctx._repo
448 449 root = r.wjoin(path)
449 450 create = False
450 451 if not os.path.exists(os.path.join(root, '.hg')):
451 452 create = True
452 453 util.makedirs(root)
453 454 self._repo = hg.repository(r.baseui, root, create=create)
454 455 for s, k in [('ui', 'commitsubrepos')]:
455 456 v = r.ui.config(s, k)
456 457 if v:
457 458 self._repo.ui.setconfig(s, k, v)
458 459 self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
459 460 self._initrepo(r, state[0], create)
460 461
461 462 def storeclean(self, path):
462 463 clean = True
463 464 lock = self._repo.lock()
464 465 itercache = self._calcstorehash(path)
465 466 try:
466 467 for filehash in self._readstorehashcache(path):
467 468 if filehash != itercache.next():
468 469 clean = False
469 470 break
470 471 except StopIteration:
471 472 # the cached and current pull states have a different size
472 473 clean = False
473 474 if clean:
474 475 try:
475 476 itercache.next()
476 477 # the cached and current pull states have a different size
477 478 clean = False
478 479 except StopIteration:
479 480 pass
480 481 lock.release()
481 482 return clean
482 483
483 484 def _calcstorehash(self, remotepath):
484 485 '''calculate a unique "store hash"
485 486
486 487         This method is used to detect when there are changes that may
487 488 require a push to a given remote path.'''
488 489 # sort the files that will be hashed in increasing (likely) file size
489 490 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
490 491 yield '# %s\n' % _expandedabspath(remotepath)
491 492 for relname in filelist:
492 493 absname = os.path.normpath(self._repo.join(relname))
493 494 yield '%s = %s\n' % (relname, _calcfilehash(absname))
494 495
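
The generator above yields one header line for the remote path and one "name = sha1" line per store file; _calcfilehash hashes an empty string for files that do not exist. An invented example of the cached content:

# Illustrative cache content (remote path invented; the digest shown is
# sha1('') as produced by _calcfilehash for a missing file):
storehash = [
    '# /home/alice/repos/backend\n',
    'bookmarks = da39a3ee5e6b4b0d3255bfef95601890afd80709\n',
    'store/phaseroots = da39a3ee5e6b4b0d3255bfef95601890afd80709\n',
    'store/00changelog.i = da39a3ee5e6b4b0d3255bfef95601890afd80709\n',
]
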
495 496 def _getstorehashcachepath(self, remotepath):
496 497 '''get a unique path for the store hash cache'''
497 498 return self._repo.join(os.path.join(
498 499 'cache', 'storehash', _getstorehashcachename(remotepath)))
499 500
500 501 def _readstorehashcache(self, remotepath):
501 502 '''read the store hash cache for a given remote repository'''
502 503 cachefile = self._getstorehashcachepath(remotepath)
503 504 if not os.path.exists(cachefile):
504 505 return ''
505 506 fd = open(cachefile, 'r')
506 507 pullstate = fd.readlines()
507 508 fd.close()
508 509 return pullstate
509 510
510 511 def _cachestorehash(self, remotepath):
511 512 '''cache the current store hash
512 513
513 514 Each remote repo requires its own store hash cache, because a subrepo
514 515 store may be "clean" versus a given remote repo, but not versus another
515 516 '''
516 517 cachefile = self._getstorehashcachepath(remotepath)
517 518 lock = self._repo.lock()
518 519 storehash = list(self._calcstorehash(remotepath))
519 520 cachedir = os.path.dirname(cachefile)
520 521 if not os.path.exists(cachedir):
521 522 util.makedirs(cachedir, notindexed=True)
522 523 fd = open(cachefile, 'w')
523 524 fd.writelines(storehash)
524 525 fd.close()
525 526 lock.release()
526 527
527 528 @annotatesubrepoerror
528 529 def _initrepo(self, parentrepo, source, create):
529 530 self._repo._subparent = parentrepo
530 531 self._repo._subsource = source
531 532
532 533 if create:
533 534 fp = self._repo.opener("hgrc", "w", text=True)
534 535 fp.write('[paths]\n')
535 536
536 537 def addpathconfig(key, value):
537 538 if value:
538 539 fp.write('%s = %s\n' % (key, value))
539 540 self._repo.ui.setconfig('paths', key, value)
540 541
541 542 defpath = _abssource(self._repo, abort=False)
542 543 defpushpath = _abssource(self._repo, True, abort=False)
543 544 addpathconfig('default', defpath)
544 545 if defpath != defpushpath:
545 546 addpathconfig('default-push', defpushpath)
546 547 fp.close()
547 548
548 549 @annotatesubrepoerror
549 550 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
550 551 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
551 552 os.path.join(prefix, self._path), explicitonly)
552 553
553 554 @annotatesubrepoerror
554 555 def status(self, rev2, **opts):
555 556 try:
556 557 rev1 = self._state[1]
557 558 ctx1 = self._repo[rev1]
558 559 ctx2 = self._repo[rev2]
559 560 return self._repo.status(ctx1, ctx2, **opts)
560 561 except error.RepoLookupError, inst:
561 562 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
562 563 % (inst, subrelpath(self)))
563 564 return [], [], [], [], [], [], []
564 565
565 566 @annotatesubrepoerror
566 567 def diff(self, ui, diffopts, node2, match, prefix, **opts):
567 568 try:
568 569 node1 = node.bin(self._state[1])
569 570 # We currently expect node2 to come from substate and be
570 571 # in hex format
571 572 if node2 is not None:
572 573 node2 = node.bin(node2)
573 574 cmdutil.diffordiffstat(ui, self._repo, diffopts,
574 575 node1, node2, match,
575 576 prefix=posixpath.join(prefix, self._path),
576 577 listsubrepos=True, **opts)
577 578 except error.RepoLookupError, inst:
578 579 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
579 580 % (inst, subrelpath(self)))
580 581
581 582 @annotatesubrepoerror
582 583 def archive(self, ui, archiver, prefix, match=None):
583 584 self._get(self._state + ('hg',))
584 585 total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
585 586 rev = self._state[1]
586 587 ctx = self._repo[rev]
587 588 for subpath in ctx.substate:
588 589 s = subrepo(ctx, subpath)
589 590 submatch = matchmod.narrowmatcher(subpath, match)
590 591 total += s.archive(
591 592 ui, archiver, os.path.join(prefix, self._path), submatch)
592 593 return total
593 594
594 595 @annotatesubrepoerror
595 596 def dirty(self, ignoreupdate=False):
596 597 r = self._state[1]
597 598 if r == '' and not ignoreupdate: # no state recorded
598 599 return True
599 600 w = self._repo[None]
600 601 if r != w.p1().hex() and not ignoreupdate:
601 602 # different version checked out
602 603 return True
603 604 return w.dirty() # working directory changed
604 605
605 606 def basestate(self):
606 607 return self._repo['.'].hex()
607 608
608 609 def checknested(self, path):
609 610 return self._repo._checknested(self._repo.wjoin(path))
610 611
611 612 @annotatesubrepoerror
612 613 def commit(self, text, user, date):
613 614 # don't bother committing in the subrepo if it's only been
614 615 # updated
615 616 if not self.dirty(True):
616 617 return self._repo['.'].hex()
617 618 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
618 619 n = self._repo.commit(text, user, date)
619 620 if not n:
620 621 return self._repo['.'].hex() # different version checked out
621 622 return node.hex(n)
622 623
623 624 @annotatesubrepoerror
624 625 def remove(self):
625 626 # we can't fully delete the repository as it may contain
626 627 # local-only history
627 628 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
628 629 hg.clean(self._repo, node.nullid, False)
629 630
630 631 def _get(self, state):
631 632 source, revision, kind = state
632 633 if revision not in self._repo:
633 634 self._repo._subsource = source
634 635 srcurl = _abssource(self._repo)
635 636 other = hg.peer(self._repo, {}, srcurl)
636 637 if len(self._repo) == 0:
637 638 self._repo.ui.status(_('cloning subrepo %s from %s\n')
638 639 % (subrelpath(self), srcurl))
639 640 parentrepo = self._repo._subparent
640 641 shutil.rmtree(self._repo.path)
641 642 other, cloned = hg.clone(self._repo._subparent.baseui, {},
642 643 other, self._repo.root,
643 644 update=False)
644 645 self._repo = cloned.local()
645 646 self._initrepo(parentrepo, source, create=True)
646 647 self._cachestorehash(srcurl)
647 648 else:
648 649 self._repo.ui.status(_('pulling subrepo %s from %s\n')
649 650 % (subrelpath(self), srcurl))
650 651 cleansub = self.storeclean(srcurl)
651 652 remotebookmarks = other.listkeys('bookmarks')
652 653 self._repo.pull(other)
653 654 bookmarks.updatefromremote(self._repo.ui, self._repo,
654 655 remotebookmarks, srcurl)
655 656 if cleansub:
656 657 # keep the repo clean after pull
657 658 self._cachestorehash(srcurl)
658 659
659 660 @annotatesubrepoerror
660 661 def get(self, state, overwrite=False):
661 662 self._get(state)
662 663 source, revision, kind = state
663 664 self._repo.ui.debug("getting subrepo %s\n" % self._path)
664 665 hg.updaterepo(self._repo, revision, overwrite)
665 666
666 667 @annotatesubrepoerror
667 668 def merge(self, state):
668 669 self._get(state)
669 670 cur = self._repo['.']
670 671 dst = self._repo[state[1]]
671 672 anc = dst.ancestor(cur)
672 673
673 674 def mergefunc():
674 675 if anc == cur and dst.branch() == cur.branch():
675 676 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
676 677 hg.update(self._repo, state[1])
677 678 elif anc == dst:
678 679 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
679 680 else:
680 681 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
681 682 hg.merge(self._repo, state[1], remind=False)
682 683
683 684 wctx = self._repo[None]
684 685 if self.dirty():
685 686 if anc != dst:
686 687 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
687 688 mergefunc()
688 689 else:
689 690 mergefunc()
690 691 else:
691 692 mergefunc()
692 693
693 694 @annotatesubrepoerror
694 695 def push(self, opts):
695 696 force = opts.get('force')
696 697 newbranch = opts.get('new_branch')
697 698 ssh = opts.get('ssh')
698 699
699 700 # push subrepos depth-first for coherent ordering
700 701 c = self._repo['']
701 702 subs = c.substate # only repos that are committed
702 703 for s in sorted(subs):
703 704 if c.sub(s).push(opts) == 0:
704 705 return False
705 706
706 707 dsturl = _abssource(self._repo, True)
707 708 if not force:
708 709 if self.storeclean(dsturl):
709 710 self._repo.ui.status(
710 711 _('no changes made to subrepo %s since last push to %s\n')
711 712 % (subrelpath(self), dsturl))
712 713 return None
713 714 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
714 715 (subrelpath(self), dsturl))
715 716 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
716 717 res = self._repo.push(other, force, newbranch=newbranch)
717 718
718 719 # the repo is now clean
719 720 self._cachestorehash(dsturl)
720 721 return res
721 722
722 723 @annotatesubrepoerror
723 724 def outgoing(self, ui, dest, opts):
724 725 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
725 726
726 727 @annotatesubrepoerror
727 728 def incoming(self, ui, source, opts):
728 729 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
729 730
730 731 @annotatesubrepoerror
731 732 def files(self):
732 733 rev = self._state[1]
733 734 ctx = self._repo[rev]
734 735 return ctx.manifest()
735 736
736 737 def filedata(self, name):
737 738 rev = self._state[1]
738 739 return self._repo[rev][name].data()
739 740
740 741 def fileflags(self, name):
741 742 rev = self._state[1]
742 743 ctx = self._repo[rev]
743 744 return ctx.flags(name)
744 745
745 746 def walk(self, match):
746 747 ctx = self._repo[None]
747 748 return ctx.walk(match)
748 749
749 750 @annotatesubrepoerror
750 751 def forget(self, ui, match, prefix):
751 752 return cmdutil.forget(ui, self._repo, match,
752 753 os.path.join(prefix, self._path), True)
753 754
754 755 @annotatesubrepoerror
755 756 def revert(self, ui, substate, *pats, **opts):
756 757 # reverting a subrepo is a 2 step process:
757 758 # 1. if the no_backup is not set, revert all modified
758 759 # files inside the subrepo
759 760 # 2. update the subrepo to the revision specified in
760 761 # the corresponding substate dictionary
761 762 ui.status(_('reverting subrepo %s\n') % substate[0])
762 763 if not opts.get('no_backup'):
763 764 # Revert all files on the subrepo, creating backups
764 765 # Note that this will not recursively revert subrepos
765 766 # We could do it if there was a set:subrepos() predicate
766 767 opts = opts.copy()
767 768 opts['date'] = None
768 769 opts['rev'] = substate[1]
769 770
770 771 pats = []
771 772 if not opts.get('all'):
772 773 pats = ['set:modified()']
773 774 self.filerevert(ui, *pats, **opts)
774 775
775 776 # Update the repo to the revision specified in the given substate
776 777 self.get(substate, overwrite=True)
777 778
778 779 def filerevert(self, ui, *pats, **opts):
779 780 ctx = self._repo[opts['rev']]
780 781 parents = self._repo.dirstate.parents()
781 782 if opts.get('all'):
782 783 pats = ['set:modified()']
783 784 else:
784 785 pats = []
785 786 cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
786 787
787 788 class svnsubrepo(abstractsubrepo):
788 789 def __init__(self, ctx, path, state):
789 790 self._path = path
790 791 self._state = state
791 792 self._ctx = ctx
792 793 self._ui = ctx._repo.ui
793 794 self._exe = util.findexe('svn')
794 795 if not self._exe:
795 796 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
796 797 % self._path)
797 798
798 799 def _svncommand(self, commands, filename='', failok=False):
799 800 cmd = [self._exe]
800 801 extrakw = {}
801 802 if not self._ui.interactive():
802 803 # Making stdin be a pipe should prevent svn from behaving
803 804 # interactively even if we can't pass --non-interactive.
804 805 extrakw['stdin'] = subprocess.PIPE
805 806 # Starting in svn 1.5 --non-interactive is a global flag
806 807 # instead of being per-command, but we need to support 1.4 so
807 808 # we have to be intelligent about what commands take
808 809 # --non-interactive.
809 810 if commands[0] in ('update', 'checkout', 'commit'):
810 811 cmd.append('--non-interactive')
811 812 cmd.extend(commands)
812 813 if filename is not None:
813 814 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
814 815 cmd.append(path)
815 816 env = dict(os.environ)
816 817 # Avoid localized output, preserve current locale for everything else.
817 818 lc_all = env.get('LC_ALL')
818 819 if lc_all:
819 820 env['LANG'] = lc_all
820 821 del env['LC_ALL']
821 822 env['LC_MESSAGES'] = 'C'
822 823 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
823 824 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
824 825 universal_newlines=True, env=env, **extrakw)
825 826 stdout, stderr = p.communicate()
826 827 stderr = stderr.strip()
827 828 if not failok:
828 829 if p.returncode:
829 830 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
830 831 if stderr:
831 832 self._ui.warn(stderr + '\n')
832 833 return stdout, stderr
833 834
834 835 @propertycache
835 836 def _svnversion(self):
836 837 output, err = self._svncommand(['--version', '--quiet'], filename=None)
837 838 m = re.search(r'^(\d+)\.(\d+)', output)
838 839 if not m:
839 840 raise util.Abort(_('cannot retrieve svn tool version'))
840 841 return (int(m.group(1)), int(m.group(2)))
841 842
842 843 def _wcrevs(self):
843 844 # Get the working directory revision as well as the last
844 845 # commit revision so we can compare the subrepo state with
845 846 # both. We used to store the working directory one.
846 847 output, err = self._svncommand(['info', '--xml'])
847 848 doc = xml.dom.minidom.parseString(output)
848 849 entries = doc.getElementsByTagName('entry')
849 850 lastrev, rev = '0', '0'
850 851 if entries:
851 852 rev = str(entries[0].getAttribute('revision')) or '0'
852 853 commits = entries[0].getElementsByTagName('commit')
853 854 if commits:
854 855 lastrev = str(commits[0].getAttribute('revision')) or '0'
855 856 return (lastrev, rev)
856 857
857 858 def _wcrev(self):
858 859 return self._wcrevs()[0]
859 860
860 861 def _wcchanged(self):
861 862 """Return (changes, extchanges, missing) where changes is True
862 863 if the working directory was changed, extchanges is
863 864 True if any of these changes concern an external entry and missing
864 865 is True if any change is a missing entry.
865 866 """
866 867 output, err = self._svncommand(['status', '--xml'])
867 868 externals, changes, missing = [], [], []
868 869 doc = xml.dom.minidom.parseString(output)
869 870 for e in doc.getElementsByTagName('entry'):
870 871 s = e.getElementsByTagName('wc-status')
871 872 if not s:
872 873 continue
873 874 item = s[0].getAttribute('item')
874 875 props = s[0].getAttribute('props')
875 876 path = e.getAttribute('path')
876 877 if item == 'external':
877 878 externals.append(path)
878 879 elif item == 'missing':
879 880 missing.append(path)
880 881 if (item not in ('', 'normal', 'unversioned', 'external')
881 882 or props not in ('', 'none', 'normal')):
882 883 changes.append(path)
883 884 for path in changes:
884 885 for ext in externals:
885 886 if path == ext or path.startswith(ext + os.sep):
886 887 return True, True, bool(missing)
887 888 return bool(changes), False, bool(missing)
888 889
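
The method above only inspects the 'path', 'item' and 'props' attributes from `svn status --xml` output. A self-contained sketch with an invented XML snippet showing that extraction:

# Minimal sketch of the wc-status attribute extraction used above;
# the XML is invented but follows the shape `svn status --xml` emits.
import xml.dom.minidom

output = '''<?xml version="1.0"?>
<status>
  <target path=".">
    <entry path="src/app.c">
      <wc-status item="modified" props="none"/>
    </entry>
    <entry path="notes.txt">
      <wc-status item="missing" props="none"/>
    </entry>
  </target>
</status>'''

changes, missing = [], []
doc = xml.dom.minidom.parseString(output)
for e in doc.getElementsByTagName('entry'):
    s = e.getElementsByTagName('wc-status')
    if not s:
        continue
    item = s[0].getAttribute('item')
    if item == 'missing':
        missing.append(e.getAttribute('path'))
    elif item not in ('', 'normal', 'unversioned', 'external'):
        changes.append(e.getAttribute('path'))
# changes == ['src/app.c'], missing == ['notes.txt']
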
889 890 def dirty(self, ignoreupdate=False):
890 891 if not self._wcchanged()[0]:
891 892 if self._state[1] in self._wcrevs() or ignoreupdate:
892 893 return False
893 894 return True
894 895
895 896 def basestate(self):
896 897 lastrev, rev = self._wcrevs()
897 898 if lastrev != rev:
898 899             # The last committed rev is not the same as rev. We would
899 900             # like to take lastrev, but we do not know if the subrepo
900 901             # URL exists at lastrev. Test it and fall back to rev if it
901 902             # is not there.
902 903 try:
903 904 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
904 905 return lastrev
905 906 except error.Abort:
906 907 pass
907 908 return rev
908 909
909 910 @annotatesubrepoerror
910 911 def commit(self, text, user, date):
911 912 # user and date are out of our hands since svn is centralized
912 913 changed, extchanged, missing = self._wcchanged()
913 914 if not changed:
914 915 return self.basestate()
915 916 if extchanged:
916 917 # Do not try to commit externals
917 918 raise util.Abort(_('cannot commit svn externals'))
918 919 if missing:
919 920 # svn can commit with missing entries but aborting like hg
920 921 # seems a better approach.
921 922 raise util.Abort(_('cannot commit missing svn entries'))
922 923 commitinfo, err = self._svncommand(['commit', '-m', text])
923 924 self._ui.status(commitinfo)
924 925 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
925 926 if not newrev:
926 927 if not commitinfo.strip():
927 928 # Sometimes, our definition of "changed" differs from
928 929                 # svn's. For instance, svn ignores missing files
929 930 # when committing. If there are only missing files, no
930 931 # commit is made, no output and no error code.
931 932 raise util.Abort(_('failed to commit svn changes'))
932 933 raise util.Abort(commitinfo.splitlines()[-1])
933 934 newrev = newrev.groups()[0]
934 935 self._ui.status(self._svncommand(['update', '-r', newrev])[0])
935 936 return newrev
936 937
937 938 @annotatesubrepoerror
938 939 def remove(self):
939 940 if self.dirty():
940 941             self._ui.warn(_('not removing repo %s because '
941 942                             'it has changes.\n') % self._path)
942 943 return
943 944 self._ui.note(_('removing subrepo %s\n') % self._path)
944 945
945 946 def onerror(function, path, excinfo):
946 947 if function is not os.remove:
947 948 raise
948 949 # read-only files cannot be unlinked under Windows
949 950 s = os.stat(path)
950 951 if (s.st_mode & stat.S_IWRITE) != 0:
951 952 raise
952 953 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
953 954 os.remove(path)
954 955
955 956 path = self._ctx._repo.wjoin(self._path)
956 957 shutil.rmtree(path, onerror=onerror)
957 958 try:
958 959 os.removedirs(os.path.dirname(path))
959 960 except OSError:
960 961 pass
961 962
962 963 @annotatesubrepoerror
963 964 def get(self, state, overwrite=False):
964 965 if overwrite:
965 966 self._svncommand(['revert', '--recursive'])
966 967 args = ['checkout']
967 968 if self._svnversion >= (1, 5):
968 969 args.append('--force')
969 970 # The revision must be specified at the end of the URL to properly
970 971 # update to a directory which has since been deleted and recreated.
971 972 args.append('%s@%s' % (state[0], state[1]))
972 973 status, err = self._svncommand(args, failok=True)
973 974 if not re.search('Checked out revision [0-9]+.', status):
974 975 if ('is already a working copy for a different URL' in err
975 976 and (self._wcchanged()[:2] == (False, False))):
976 977 # obstructed but clean working copy, so just blow it away.
977 978 self.remove()
978 979 self.get(state, overwrite=False)
979 980 return
980 981 raise util.Abort((status or err).splitlines()[-1])
981 982 self._ui.status(status)
982 983
983 984 @annotatesubrepoerror
984 985 def merge(self, state):
985 986 old = self._state[1]
986 987 new = state[1]
987 988 wcrev = self._wcrev()
988 989 if new != wcrev:
989 990 dirty = old == wcrev or self._wcchanged()[0]
990 991 if _updateprompt(self._ui, self, dirty, wcrev, new):
991 992 self.get(state, False)
992 993
993 994 def push(self, opts):
994 995 # push is a no-op for SVN
995 996 return True
996 997
997 998 @annotatesubrepoerror
998 999 def files(self):
999 1000 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1000 1001 doc = xml.dom.minidom.parseString(output)
1001 1002 paths = []
1002 1003 for e in doc.getElementsByTagName('entry'):
1003 1004 kind = str(e.getAttribute('kind'))
1004 1005 if kind != 'file':
1005 1006 continue
1006 1007 name = ''.join(c.data for c
1007 1008 in e.getElementsByTagName('name')[0].childNodes
1008 1009 if c.nodeType == c.TEXT_NODE)
1009 1010 paths.append(name.encode('utf-8'))
1010 1011 return paths
1011 1012
1012 1013 def filedata(self, name):
1013 1014 return self._svncommand(['cat'], name)[0]
1014 1015
1015 1016
1016 1017 class gitsubrepo(abstractsubrepo):
1017 1018 def __init__(self, ctx, path, state):
1018 1019 self._state = state
1019 1020 self._ctx = ctx
1020 1021 self._path = path
1021 1022 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1022 1023 self._abspath = ctx._repo.wjoin(path)
1023 1024 self._subparent = ctx._repo
1024 1025 self._ui = ctx._repo.ui
1025 1026 self._ensuregit()
1026 1027
1027 1028 def _ensuregit(self):
1028 1029 try:
1029 1030 self._gitexecutable = 'git'
1030 1031 out, err = self._gitnodir(['--version'])
1031 1032 except OSError, e:
1032 1033 if e.errno != 2 or os.name != 'nt':
1033 1034 raise
1034 1035 self._gitexecutable = 'git.cmd'
1035 1036 out, err = self._gitnodir(['--version'])
1036 1037 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1037 1038 if not m:
1038 1039             self._ui.warn(_('cannot retrieve git version\n'))
1039 1040 return
1040 1041         version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1041 1042 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1042 1043 # despite the docstring comment. For now, error on 1.4.0, warn on
1043 1044 # 1.5.0 but attempt to continue.
1044 1045         if version < (1, 5, 0):
1045 1046             raise util.Abort(_('git subrepo requires git 1.6.0 or later'))
1046 1047         elif version < (1, 6, 0):
1047 1048             self._ui.warn(_('git subrepo requires git 1.6.0 or later\n'))
1048 1049
1049 1050 def _gitcommand(self, commands, env=None, stream=False):
1050 1051 return self._gitdir(commands, env=env, stream=stream)[0]
1051 1052
1052 1053 def _gitdir(self, commands, env=None, stream=False):
1053 1054 return self._gitnodir(commands, env=env, stream=stream,
1054 1055 cwd=self._abspath)
1055 1056
1056 1057 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1057 1058 """Calls the git command
1058 1059
1059 1060         The method tries to call the git command. Versions prior to 1.6.0
1060 1061         are not supported and will very probably fail.
1061 1062 """
1062 1063 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1063 1064 # unless ui.quiet is set, print git's stderr,
1064 1065 # which is mostly progress and useful info
1065 1066 errpipe = None
1066 1067 if self._ui.quiet:
1067 1068 errpipe = open(os.devnull, 'w')
1068 1069 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1069 1070 cwd=cwd, env=env, close_fds=util.closefds,
1070 1071 stdout=subprocess.PIPE, stderr=errpipe)
1071 1072 if stream:
1072 1073 return p.stdout, None
1073 1074
1074 1075 retdata = p.stdout.read().strip()
1075 1076 # wait for the child to exit to avoid race condition.
1076 1077 p.wait()
1077 1078
1078 1079 if p.returncode != 0 and p.returncode != 1:
1079 1080 # there are certain error codes that are ok
1080 1081 command = commands[0]
1081 1082 if command in ('cat-file', 'symbolic-ref'):
1082 1083 return retdata, p.returncode
1083 1084 # for all others, abort
1084 1085 raise util.Abort('git %s error %d in %s' %
1085 1086 (command, p.returncode, self._relpath))
1086 1087
1087 1088 return retdata, p.returncode
1088 1089
1089 1090 def _gitmissing(self):
1090 1091 return not os.path.exists(os.path.join(self._abspath, '.git'))
1091 1092
1092 1093 def _gitstate(self):
1093 1094 return self._gitcommand(['rev-parse', 'HEAD'])
1094 1095
1095 1096 def _gitcurrentbranch(self):
1096 1097 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1097 1098 if err:
1098 1099 current = None
1099 1100 return current
1100 1101
1101 1102 def _gitremote(self, remote):
1102 1103 out = self._gitcommand(['remote', 'show', '-n', remote])
1103 1104 line = out.split('\n')[1]
1104 1105 i = line.index('URL: ') + len('URL: ')
1105 1106 return line[i:]
1106 1107
1107 1108 def _githavelocally(self, revision):
1108 1109 out, code = self._gitdir(['cat-file', '-e', revision])
1109 1110 return code == 0
1110 1111
1111 1112 def _gitisancestor(self, r1, r2):
1112 1113 base = self._gitcommand(['merge-base', r1, r2])
1113 1114 return base == r1
1114 1115
1115 1116 def _gitisbare(self):
1116 1117 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1117 1118
1118 1119 def _gitupdatestat(self):
1119 1120 """This must be run before git diff-index.
1120 1121 diff-index only looks at changes to file stat;
1121 1122 this command looks at file contents and updates the stat."""
1122 1123 self._gitcommand(['update-index', '-q', '--refresh'])
1123 1124
1124 1125 def _gitbranchmap(self):
1125 1126 '''returns 2 things:
1126 1127 a map from git branch to revision
1127 1128 a map from revision to branches'''
1128 1129 branch2rev = {}
1129 1130 rev2branch = {}
1130 1131
1131 1132 out = self._gitcommand(['for-each-ref', '--format',
1132 1133 '%(objectname) %(refname)'])
1133 1134 for line in out.split('\n'):
1134 1135 revision, ref = line.split(' ')
1135 1136 if (not ref.startswith('refs/heads/') and
1136 1137 not ref.startswith('refs/remotes/')):
1137 1138 continue
1138 1139 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1139 1140 continue # ignore remote/HEAD redirects
1140 1141 branch2rev[ref] = revision
1141 1142 rev2branch.setdefault(revision, []).append(ref)
1142 1143 return branch2rev, rev2branch
1143 1144
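A standalone sketch of the ref parsing done by _gitbranchmap() above. The sample text mimics `git for-each-ref --format '%(objectname) %(refname)'` output; the hashes and branch names are invented for illustration:

    sample = '\n'.join([
        '1111111111111111111111111111111111111111 refs/heads/master',
        '2222222222222222222222222222222222222222 refs/heads/topic',
        '1111111111111111111111111111111111111111 refs/remotes/origin/master',
        '3333333333333333333333333333333333333333 refs/remotes/origin/HEAD',
        '4444444444444444444444444444444444444444 refs/tags/v1.0',
    ])

    branch2rev, rev2branch = {}, {}
    for line in sample.split('\n'):
        revision, ref = line.split(' ')
        if (not ref.startswith('refs/heads/') and
            not ref.startswith('refs/remotes/')):
            continue                 # tags and other refs are ignored
        if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
            continue                 # remote HEAD symrefs are ignored too
        branch2rev[ref] = revision
        rev2branch.setdefault(revision, []).append(ref)

    print(rev2branch['1' * 40])
    # ['refs/heads/master', 'refs/remotes/origin/master']
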
1144 1145 def _gittracking(self, branches):
1145 1146 'return map of remote branch to local tracking branch'
1146 1147 # assumes no more than one local tracking branch for each remote
1147 1148 tracking = {}
1148 1149 for b in branches:
1149 1150 if b.startswith('refs/remotes/'):
1150 1151 continue
1151 1152 bname = b.split('/', 2)[2]
1152 1153 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1153 1154 if remote:
1154 1155 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1155 1156 tracking['refs/remotes/%s/%s' %
1156 1157 (remote, ref.split('/', 2)[2])] = b
1157 1158 return tracking
1158 1159
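The mapping that _gittracking() above builds, sketched with the two `git config` lookups replaced by a hypothetical in-memory dict so the logic can be followed without a repository:

    # Hypothetical stand-in for `git config branch.<name>.remote/.merge` lookups.
    gitconfig = {
        'branch.master.remote': 'origin',
        'branch.master.merge': 'refs/heads/master',
    }
    branches = ['refs/heads/master', 'refs/heads/topic',
                'refs/remotes/origin/master']

    tracking = {}
    for b in branches:
        if b.startswith('refs/remotes/'):
            continue                          # only local branches are inspected
        bname = b.split('/', 2)[2]            # 'refs/heads/master' -> 'master'
        remote = gitconfig.get('branch.%s.remote' % bname, '')
        if remote:
            ref = gitconfig.get('branch.%s.merge' % bname, '')
            tracking['refs/remotes/%s/%s' % (remote, ref.split('/', 2)[2])] = b

    print(tracking)   # {'refs/remotes/origin/master': 'refs/heads/master'}
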
1159 1160 def _abssource(self, source):
1160 1161 if '://' not in source:
1161 1162 # recognize the scp syntax as an absolute source
1162 1163 colon = source.find(':')
1163 1164 if colon != -1 and '/' not in source[:colon]:
1164 1165 return source
1165 1166 self._subsource = source
1166 1167 return _abssource(self)
1167 1168
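The test in _abssource() above treats a source as already absolute when it carries a URL scheme or uses scp-like syntax (a ':' with no '/' before it). A small illustrative helper applying the same rule:

    def looks_absolute(source):
        # URL with a scheme, e.g. https://..., ssh://...
        if '://' in source:
            return True
        # scp-like syntax, e.g. user@host:path -- colon before any slash
        colon = source.find(':')
        return colon != -1 and '/' not in source[:colon]

    print(looks_absolute('git@example.com:project.git'))   # True
    print(looks_absolute('../sibling/repo'))               # False (stays relative)
    print(looks_absolute('https://example.com/repo.git'))  # True
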
1168 1169 def _fetch(self, source, revision):
1169 1170 if self._gitmissing():
1170 1171 source = self._abssource(source)
1171 1172 self._ui.status(_('cloning subrepo %s from %s\n') %
1172 1173 (self._relpath, source))
1173 1174 self._gitnodir(['clone', source, self._abspath])
1174 1175 if self._githavelocally(revision):
1175 1176 return
1176 1177 self._ui.status(_('pulling subrepo %s from %s\n') %
1177 1178 (self._relpath, self._gitremote('origin')))
1178 1179 # try only origin: the originally cloned repo
1179 1180 self._gitcommand(['fetch'])
1180 1181 if not self._githavelocally(revision):
1181 1182 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1182 1183 (revision, self._relpath))
1183 1184
1184 1185 @annotatesubrepoerror
1185 1186 def dirty(self, ignoreupdate=False):
1186 1187 if self._gitmissing():
1187 1188 return self._state[1] != ''
1188 1189 if self._gitisbare():
1189 1190 return True
1190 1191 if not ignoreupdate and self._state[1] != self._gitstate():
1191 1192 # different version checked out
1192 1193 return True
1193 1194 # check for staged changes or modified files; ignore untracked files
1194 1195 self._gitupdatestat()
1195 1196 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1196 1197 return code == 1
1197 1198
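The order of the checks in dirty() above matters; here is a sketch of that decision chain with the git queries replaced by plain booleans (the parameter names are invented for illustration):

    def subrepo_dirty(missing, bare, recorded_rev, checkedout_rev,
                      diffindex_code, ignoreupdate=False):
        if missing:
            return recorded_rev != ''   # missing repo: dirty only if a rev is recorded
        if bare:
            return True                 # a bare repo cannot match the recorded state
        if not ignoreupdate and recorded_rev != checkedout_rev:
            return True                 # a different revision is checked out
        # `git diff-index --quiet HEAD` exits 1 when there are local changes
        return diffindex_code == 1

    print(subrepo_dirty(False, False, 'abc', 'abc', 1))  # True: modified files
    print(subrepo_dirty(False, False, 'abc', 'def', 0))  # True: wrong revision
    print(subrepo_dirty(False, False, 'abc', 'abc', 0))  # False: clean
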
1198 1199 def basestate(self):
1199 1200 return self._gitstate()
1200 1201
1201 1202 @annotatesubrepoerror
1202 1203 def get(self, state, overwrite=False):
1203 1204 source, revision, kind = state
1204 1205 if not revision:
1205 1206 self.remove()
1206 1207 return
1207 1208 self._fetch(source, revision)
1208 1209 # if the repo was set to be bare, unbare it
1209 1210 if self._gitisbare():
1210 1211 self._gitcommand(['config', 'core.bare', 'false'])
1211 1212 if self._gitstate() == revision:
1212 1213 self._gitcommand(['reset', '--hard', 'HEAD'])
1213 1214 return
1214 1215 elif self._gitstate() == revision:
1215 1216 if overwrite:
1216 1217 # first reset the index to unmark new files for commit, because
1217 1218 # reset --hard will otherwise throw away files added for commit,
1218 1219 # not just unmark them.
1219 1220 self._gitcommand(['reset', 'HEAD'])
1220 1221 self._gitcommand(['reset', '--hard', 'HEAD'])
1221 1222 return
1222 1223 branch2rev, rev2branch = self._gitbranchmap()
1223 1224
1224 1225 def checkout(args):
1225 1226 cmd = ['checkout']
1226 1227 if overwrite:
1227 1228 # first reset the index to unmark new files for commit, because
1228 1229 # the -f option will otherwise throw away files added for
1229 1230 # commit, not just unmark them.
1230 1231 self._gitcommand(['reset', 'HEAD'])
1231 1232 cmd.append('-f')
1232 1233 self._gitcommand(cmd + args)
1233 1234
1234 1235 def rawcheckout():
1235 1236             # no suitable branch to check out, so check out a detached HEAD
1236 1237 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1237 1238 self._relpath)
1238 1239 self._ui.warn(_('check out a git branch if you intend '
1239 1240 'to make changes\n'))
1240 1241 checkout(['-q', revision])
1241 1242
1242 1243 if revision not in rev2branch:
1243 1244 rawcheckout()
1244 1245 return
1245 1246 branches = rev2branch[revision]
1246 1247 firstlocalbranch = None
1247 1248 for b in branches:
1248 1249 if b == 'refs/heads/master':
1249 1250 # master trumps all other branches
1250 1251 checkout(['refs/heads/master'])
1251 1252 return
1252 1253 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1253 1254 firstlocalbranch = b
1254 1255 if firstlocalbranch:
1255 1256 checkout([firstlocalbranch])
1256 1257 return
1257 1258
1258 1259 tracking = self._gittracking(branch2rev.keys())
1259 1260 # choose a remote branch already tracked if possible
1260 1261 remote = branches[0]
1261 1262 if remote not in tracking:
1262 1263 for b in branches:
1263 1264 if b in tracking:
1264 1265 remote = b
1265 1266 break
1266 1267
1267 1268 if remote not in tracking:
1268 1269 # create a new local tracking branch
1269 1270 local = remote.split('/', 3)[3]
1270 1271 checkout(['-b', local, remote])
1271 1272 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1272 1273 # When updating to a tracked remote branch,
1273 1274 # if the local tracking branch is downstream of it,
1274 1275 # a normal `git pull` would have performed a "fast-forward merge"
1275 1276 # which is equivalent to updating the local branch to the remote.
1276 1277 # Since we are only looking at branching at update, we need to
1277 1278 # detect this situation and perform this action lazily.
1278 1279 if tracking[remote] != self._gitcurrentbranch():
1279 1280 checkout([tracking[remote]])
1280 1281 self._gitcommand(['merge', '--ff', remote])
1281 1282 else:
1282 1283 # a real merge would be required, just checkout the revision
1283 1284 rawcheckout()
1284 1285
1285 1286 @annotatesubrepoerror
1286 1287 def commit(self, text, user, date):
1287 1288 if self._gitmissing():
1288 1289 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1289 1290 cmd = ['commit', '-a', '-m', text]
1290 1291 env = os.environ.copy()
1291 1292 if user:
1292 1293 cmd += ['--author', user]
1293 1294 if date:
1294 1295 # git's date parser silently ignores when seconds < 1e9
1295 1296 # convert to ISO8601
1296 1297 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1297 1298 '%Y-%m-%dT%H:%M:%S %1%2')
1298 1299 self._gitcommand(cmd, env=env)
1299 1300 # make sure commit works otherwise HEAD might not exist under certain
1300 1301 # circumstances
1301 1302 return self._gitstate()
1302 1303
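The date handling in commit() above relies on util.datestr() to turn Mercurial's (unixtime, offset) pair into ISO 8601 for GIT_AUTHOR_DATE. Below is a standard-library-only reimplementation of that formatting, for illustration, assuming the usual Mercurial convention that the offset is seconds west of UTC:

    import time

    def iso8601(date):
        unixtime, offset = date                 # offset: seconds west of UTC
        sign = '-' if offset > 0 else '+'
        minutes = abs(offset) // 60
        zone = '%s%02d%02d' % (sign, minutes // 60, minutes % 60)
        stamp = time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(unixtime - offset))
        return '%s %s' % (stamp, zone)

    # env['GIT_AUTHOR_DATE'] = iso8601(date) would then be accepted by git.
    print(iso8601((0, -3600)))   # '1970-01-01T01:00:00 +0100'
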
1303 1304 @annotatesubrepoerror
1304 1305 def merge(self, state):
1305 1306 source, revision, kind = state
1306 1307 self._fetch(source, revision)
1307 1308 base = self._gitcommand(['merge-base', revision, self._state[1]])
1308 1309 self._gitupdatestat()
1309 1310 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1310 1311
1311 1312 def mergefunc():
1312 1313 if base == revision:
1313 1314 self.get(state) # fast forward merge
1314 1315 elif base != self._state[1]:
1315 1316 self._gitcommand(['merge', '--no-commit', revision])
1316 1317
1317 1318 if self.dirty():
1318 1319 if self._gitstate() != revision:
1319 1320 dirty = self._gitstate() == self._state[1] or code != 0
1320 1321 if _updateprompt(self._ui, self, dirty,
1321 1322 self._state[1][:7], revision[:7]):
1322 1323 mergefunc()
1323 1324 else:
1324 1325 mergefunc()
1325 1326
1326 1327 @annotatesubrepoerror
1327 1328 def push(self, opts):
1328 1329 force = opts.get('force')
1329 1330
1330 1331 if not self._state[1]:
1331 1332 return True
1332 1333 if self._gitmissing():
1333 1334 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1334 1335 # if a branch in origin contains the revision, nothing to do
1335 1336 branch2rev, rev2branch = self._gitbranchmap()
1336 1337 if self._state[1] in rev2branch:
1337 1338 for b in rev2branch[self._state[1]]:
1338 1339 if b.startswith('refs/remotes/origin/'):
1339 1340 return True
1340 1341 for b, revision in branch2rev.iteritems():
1341 1342 if b.startswith('refs/remotes/origin/'):
1342 1343 if self._gitisancestor(self._state[1], revision):
1343 1344 return True
1344 1345 # otherwise, try to push the currently checked out branch
1345 1346 cmd = ['push']
1346 1347 if force:
1347 1348 cmd.append('--force')
1348 1349
1349 1350 current = self._gitcurrentbranch()
1350 1351 if current:
1351 1352 # determine if the current branch is even useful
1352 1353 if not self._gitisancestor(self._state[1], current):
1353 1354 self._ui.warn(_('unrelated git branch checked out '
1354 1355 'in subrepo %s\n') % self._relpath)
1355 1356 return False
1356 1357 self._ui.status(_('pushing branch %s of subrepo %s\n') %
1357 1358 (current.split('/', 2)[2], self._relpath))
1358 1359 self._gitcommand(cmd + ['origin', current])
1359 1360 return True
1360 1361 else:
1361 1362 self._ui.warn(_('no branch checked out in subrepo %s\n'
1362 1363 'cannot push revision %s\n') %
1363 1364 (self._relpath, self._state[1]))
1364 1365 return False
1365 1366
1366 1367 @annotatesubrepoerror
1367 1368 def remove(self):
1368 1369 if self._gitmissing():
1369 1370 return
1370 1371 if self.dirty():
1371 1372 self._ui.warn(_('not removing repo %s because '
1372 1373 'it has changes.\n') % self._relpath)
1373 1374 return
1374 1375 # we can't fully delete the repository as it may contain
1375 1376 # local-only history
1376 1377 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1377 1378 self._gitcommand(['config', 'core.bare', 'true'])
1378 1379 for f in os.listdir(self._abspath):
1379 1380 if f == '.git':
1380 1381 continue
1381 1382 path = os.path.join(self._abspath, f)
1382 1383 if os.path.isdir(path) and not os.path.islink(path):
1383 1384 shutil.rmtree(path)
1384 1385 else:
1385 1386 os.remove(path)
1386 1387
1387 1388 def archive(self, ui, archiver, prefix, match=None):
1388 1389 total = 0
1389 1390 source, revision = self._state
1390 1391 if not revision:
1391 1392 return total
1392 1393 self._fetch(source, revision)
1393 1394
1394 1395 # Parse git's native archive command.
1395 1396 # This should be much faster than manually traversing the trees
1396 1397 # and objects with many subprocess calls.
1397 1398 tarstream = self._gitcommand(['archive', revision], stream=True)
1398 1399 tar = tarfile.open(fileobj=tarstream, mode='r|')
1399 1400 relpath = subrelpath(self)
1400 1401 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1401 1402 for i, info in enumerate(tar):
1402 1403 if info.isdir():
1403 1404 continue
1404 1405 if match and not match(info.name):
1405 1406 continue
1406 1407 if info.issym():
1407 1408 data = info.linkname
1408 1409 else:
1409 1410 data = tar.extractfile(info).read()
1410 1411 archiver.addfile(os.path.join(prefix, self._path, info.name),
1411 1412 info.mode, info.issym(), data)
1412 1413 total += 1
1413 1414 ui.progress(_('archiving (%s)') % relpath, i + 1,
1414 1415 unit=_('files'))
1415 1416 ui.progress(_('archiving (%s)') % relpath, None)
1416 1417 return total
1417 1418
1418 1419
1419 1420 @annotatesubrepoerror
1420 1421 def status(self, rev2, **opts):
1421 1422 rev1 = self._state[1]
1422 1423 if self._gitmissing() or not rev1:
1423 1424 # if the repo is missing, return no results
1424 1425 return [], [], [], [], [], [], []
1425 1426 modified, added, removed = [], [], []
1426 1427 self._gitupdatestat()
1427 1428 if rev2:
1428 1429 command = ['diff-tree', rev1, rev2]
1429 1430 else:
1430 1431 command = ['diff-index', rev1]
1431 1432 out = self._gitcommand(command)
1432 1433 for line in out.split('\n'):
1433 1434 tab = line.find('\t')
1434 1435 if tab == -1:
1435 1436 continue
1436 1437 status, f = line[tab - 1], line[tab + 1:]
1437 1438 if status == 'M':
1438 1439 modified.append(f)
1439 1440 elif status == 'A':
1440 1441 added.append(f)
1441 1442 elif status == 'D':
1442 1443 removed.append(f)
1443 1444
1444 1445 deleted = unknown = ignored = clean = []
1445 1446 return modified, added, removed, deleted, unknown, ignored, clean
1446 1447
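The parsing in status() above keys off the single status letter that git prints immediately before the tab in each `diff-index`/`diff-tree` line. A sketch with made-up sample lines (the raw format is roughly ':<oldmode> <newmode> <oldhash> <newhash> <letter>\t<path>'):

    sample = [
        ':100644 100644 aaaa bbbb M\tsetup.py',
        ':000000 100644 0000 cccc A\tnewfile.txt',
        ':100644 000000 dddd 0000 D\toldfile.txt',
    ]

    modified, added, removed = [], [], []
    for line in sample:
        tab = line.find('\t')
        if tab == -1:
            continue
        status, f = line[tab - 1], line[tab + 1:]   # letter before tab, path after
        if status == 'M':
            modified.append(f)
        elif status == 'A':
            added.append(f)
        elif status == 'D':
            removed.append(f)

    print(modified)  # ['setup.py']
    print(added)     # ['newfile.txt']
    print(removed)   # ['oldfile.txt']
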
1447 1448 types = {
1448 1449 'hg': hgsubrepo,
1449 1450 'svn': svnsubrepo,
1450 1451 'git': gitsubrepo,
1451 1452 }
@@ -1,807 +1,814 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import errno, getpass, os, socket, sys, tempfile, traceback
10 10 import config, scmutil, util, error, formatter
11 11
12 12 class ui(object):
13 13 def __init__(self, src=None):
14 14 self._buffers = []
15 15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
16 16 self._reportuntrusted = True
17 17 self._ocfg = config.config() # overlay
18 18 self._tcfg = config.config() # trusted
19 19 self._ucfg = config.config() # untrusted
20 20 self._trustusers = set()
21 21 self._trustgroups = set()
22 22 self.callhooks = True
23 23
24 24 if src:
25 25 self.fout = src.fout
26 26 self.ferr = src.ferr
27 27 self.fin = src.fin
28 28
29 29 self._tcfg = src._tcfg.copy()
30 30 self._ucfg = src._ucfg.copy()
31 31 self._ocfg = src._ocfg.copy()
32 32 self._trustusers = src._trustusers.copy()
33 33 self._trustgroups = src._trustgroups.copy()
34 34 self.environ = src.environ
35 35 self.callhooks = src.callhooks
36 36 self.fixconfig()
37 37 else:
38 38 self.fout = sys.stdout
39 39 self.ferr = sys.stderr
40 40 self.fin = sys.stdin
41 41
42 42 # shared read-only environment
43 43 self.environ = os.environ
44 44 # we always trust global config files
45 45 for f in scmutil.rcpath():
46 46 self.readconfig(f, trust=True)
47 47
48 48 def copy(self):
49 49 return self.__class__(self)
50 50
51 51 def formatter(self, topic, opts):
52 52 return formatter.formatter(self, topic, opts)
53 53
54 54 def _trusted(self, fp, f):
55 55 st = util.fstat(fp)
56 56 if util.isowner(st):
57 57 return True
58 58
59 59 tusers, tgroups = self._trustusers, self._trustgroups
60 60 if '*' in tusers or '*' in tgroups:
61 61 return True
62 62
63 63 user = util.username(st.st_uid)
64 64 group = util.groupname(st.st_gid)
65 65 if user in tusers or group in tgroups or user == util.username():
66 66 return True
67 67
68 68 if self._reportuntrusted:
69 69 self.warn(_('not trusting file %s from untrusted '
70 70 'user %s, group %s\n') % (f, user, group))
71 71 return False
72 72
73 73 def readconfig(self, filename, root=None, trust=False,
74 74 sections=None, remap=None):
75 75 try:
76 76 fp = open(filename)
77 77 except IOError:
78 78 if not sections: # ignore unless we were looking for something
79 79 return
80 80 raise
81 81
82 82 cfg = config.config()
83 83 trusted = sections or trust or self._trusted(fp, filename)
84 84
85 85 try:
86 86 cfg.read(filename, fp, sections=sections, remap=remap)
87 87 fp.close()
88 88 except error.ConfigError, inst:
89 89 if trusted:
90 90 raise
91 91 self.warn(_("ignored: %s\n") % str(inst))
92 92
93 93 if self.plain():
94 94 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
95 95 'logtemplate', 'style',
96 96 'traceback', 'verbose'):
97 97 if k in cfg['ui']:
98 98 del cfg['ui'][k]
99 99 for k, v in cfg.items('defaults'):
100 100 del cfg['defaults'][k]
101 101 # Don't remove aliases from the configuration if in the exceptionlist
102 102 if self.plain('alias'):
103 103 for k, v in cfg.items('alias'):
104 104 del cfg['alias'][k]
105 105
106 106 if trusted:
107 107 self._tcfg.update(cfg)
108 108 self._tcfg.update(self._ocfg)
109 109 self._ucfg.update(cfg)
110 110 self._ucfg.update(self._ocfg)
111 111
112 112 if root is None:
113 113 root = os.path.expanduser('~')
114 114 self.fixconfig(root=root)
115 115
116 116 def fixconfig(self, root=None, section=None):
117 117 if section in (None, 'paths'):
118 118 # expand vars and ~
119 119 # translate paths relative to root (or home) into absolute paths
120 120 root = root or os.getcwd()
121 121 for c in self._tcfg, self._ucfg, self._ocfg:
122 122 for n, p in c.items('paths'):
123 123 if not p:
124 124 continue
125 125 if '%%' in p:
126 126 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
127 127 % (n, p, self.configsource('paths', n)))
128 128 p = p.replace('%%', '%')
129 129 p = util.expandpath(p)
130 130 if not util.hasscheme(p) and not os.path.isabs(p):
131 131 p = os.path.normpath(os.path.join(root, p))
132 132 c.set("paths", n, p)
133 133
134 134 if section in (None, 'ui'):
135 135 # update ui options
136 136 self.debugflag = self.configbool('ui', 'debug')
137 137 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
138 138 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
139 139 if self.verbose and self.quiet:
140 140 self.quiet = self.verbose = False
141 141 self._reportuntrusted = self.debugflag or self.configbool("ui",
142 142 "report_untrusted", True)
143 143 self.tracebackflag = self.configbool('ui', 'traceback', False)
144 144
145 145 if section in (None, 'trusted'):
146 146 # update trust information
147 147 self._trustusers.update(self.configlist('trusted', 'users'))
148 148 self._trustgroups.update(self.configlist('trusted', 'groups'))
149 149
150 150 def backupconfig(self, section, item):
151 151 return (self._ocfg.backup(section, item),
152 152 self._tcfg.backup(section, item),
153 153 self._ucfg.backup(section, item),)
154 154 def restoreconfig(self, data):
155 155 self._ocfg.restore(data[0])
156 156 self._tcfg.restore(data[1])
157 157 self._ucfg.restore(data[2])
158 158
159 159 def setconfig(self, section, name, value, overlay=True):
160 160 if overlay:
161 161 self._ocfg.set(section, name, value)
162 162 self._tcfg.set(section, name, value)
163 163 self._ucfg.set(section, name, value)
164 164 self.fixconfig(section=section)
165 165
166 166 def _data(self, untrusted):
167 167 return untrusted and self._ucfg or self._tcfg
168 168
169 169 def configsource(self, section, name, untrusted=False):
170 170 return self._data(untrusted).source(section, name) or 'none'
171 171
172 172 def config(self, section, name, default=None, untrusted=False):
173 173 if isinstance(name, list):
174 174 alternates = name
175 175 else:
176 176 alternates = [name]
177 177
178 178 for n in alternates:
179 179             value = self._data(untrusted).get(section, n, None)
180 180 if value is not None:
181 181 name = n
182 182 break
183 183 else:
184 184 value = default
185 185
186 186 if self.debugflag and not untrusted and self._reportuntrusted:
187 187 uvalue = self._ucfg.get(section, name)
188 188 if uvalue is not None and uvalue != value:
189 189 self.debug("ignoring untrusted configuration option "
190 190 "%s.%s = %s\n" % (section, name, uvalue))
191 191 return value
192 192
193 193 def configpath(self, section, name, default=None, untrusted=False):
194 194 'get a path config item, expanded relative to repo root or config file'
195 195 v = self.config(section, name, default, untrusted)
196 196 if v is None:
197 197 return None
198 198 if not os.path.isabs(v) or "://" not in v:
199 199 src = self.configsource(section, name, untrusted)
200 200 if ':' in src:
201 201 base = os.path.dirname(src.rsplit(':')[0])
202 202 v = os.path.join(base, os.path.expanduser(v))
203 203 return v
204 204
205 205 def configbool(self, section, name, default=False, untrusted=False):
206 206 """parse a configuration element as a boolean
207 207
208 208 >>> u = ui(); s = 'foo'
209 209 >>> u.setconfig(s, 'true', 'yes')
210 210 >>> u.configbool(s, 'true')
211 211 True
212 212 >>> u.setconfig(s, 'false', 'no')
213 213 >>> u.configbool(s, 'false')
214 214 False
215 215 >>> u.configbool(s, 'unknown')
216 216 False
217 217 >>> u.configbool(s, 'unknown', True)
218 218 True
219 219 >>> u.setconfig(s, 'invalid', 'somevalue')
220 220 >>> u.configbool(s, 'invalid')
221 221 Traceback (most recent call last):
222 222 ...
223 223 ConfigError: foo.invalid is not a boolean ('somevalue')
224 224 """
225 225
226 226 v = self.config(section, name, None, untrusted)
227 227 if v is None:
228 228 return default
229 229 if isinstance(v, bool):
230 230 return v
231 231 b = util.parsebool(v)
232 232 if b is None:
233 233 raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
234 234 % (section, name, v))
235 235 return b
236 236
237 237 def configint(self, section, name, default=None, untrusted=False):
238 238 """parse a configuration element as an integer
239 239
240 240 >>> u = ui(); s = 'foo'
241 241 >>> u.setconfig(s, 'int1', '42')
242 242 >>> u.configint(s, 'int1')
243 243 42
244 244 >>> u.setconfig(s, 'int2', '-42')
245 245 >>> u.configint(s, 'int2')
246 246 -42
247 247 >>> u.configint(s, 'unknown', 7)
248 248 7
249 249 >>> u.setconfig(s, 'invalid', 'somevalue')
250 250 >>> u.configint(s, 'invalid')
251 251 Traceback (most recent call last):
252 252 ...
253 253 ConfigError: foo.invalid is not an integer ('somevalue')
254 254 """
255 255
256 256 v = self.config(section, name, None, untrusted)
257 257 if v is None:
258 258 return default
259 259 try:
260 260 return int(v)
261 261 except ValueError:
262 262 raise error.ConfigError(_("%s.%s is not an integer ('%s')")
263 263 % (section, name, v))
264 264
265 265 def configbytes(self, section, name, default=0, untrusted=False):
266 266 """parse a configuration element as a quantity in bytes
267 267
268 268 Units can be specified as b (bytes), k or kb (kilobytes), m or
269 269 mb (megabytes), g or gb (gigabytes).
270 270
271 271 >>> u = ui(); s = 'foo'
272 272 >>> u.setconfig(s, 'val1', '42')
273 273 >>> u.configbytes(s, 'val1')
274 274 42
275 275 >>> u.setconfig(s, 'val2', '42.5 kb')
276 276 >>> u.configbytes(s, 'val2')
277 277 43520
278 278 >>> u.configbytes(s, 'unknown', '7 MB')
279 279 7340032
280 280 >>> u.setconfig(s, 'invalid', 'somevalue')
281 281 >>> u.configbytes(s, 'invalid')
282 282 Traceback (most recent call last):
283 283 ...
284 284 ConfigError: foo.invalid is not a byte quantity ('somevalue')
285 285 """
286 286
287 287 value = self.config(section, name)
288 288 if value is None:
289 289 if not isinstance(default, str):
290 290 return default
291 291 value = default
292 292 try:
293 293 return util.sizetoint(value)
294 294 except error.ParseError:
295 295 raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
296 296 % (section, name, value))
297 297
298 298 def configlist(self, section, name, default=None, untrusted=False):
299 299 """parse a configuration element as a list of comma/space separated
300 300 strings
301 301
302 302 >>> u = ui(); s = 'foo'
303 303 >>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
304 304 >>> u.configlist(s, 'list1')
305 305 ['this', 'is', 'a small', 'test']
306 306 """
307 307
308 308 def _parse_plain(parts, s, offset):
309 309 whitespace = False
310 310 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
311 311 whitespace = True
312 312 offset += 1
313 313 if offset >= len(s):
314 314 return None, parts, offset
315 315 if whitespace:
316 316 parts.append('')
317 317 if s[offset] == '"' and not parts[-1]:
318 318 return _parse_quote, parts, offset + 1
319 319 elif s[offset] == '"' and parts[-1][-1] == '\\':
320 320 parts[-1] = parts[-1][:-1] + s[offset]
321 321 return _parse_plain, parts, offset + 1
322 322 parts[-1] += s[offset]
323 323 return _parse_plain, parts, offset + 1
324 324
325 325 def _parse_quote(parts, s, offset):
326 326 if offset < len(s) and s[offset] == '"': # ""
327 327 parts.append('')
328 328 offset += 1
329 329 while offset < len(s) and (s[offset].isspace() or
330 330 s[offset] == ','):
331 331 offset += 1
332 332 return _parse_plain, parts, offset
333 333
334 334 while offset < len(s) and s[offset] != '"':
335 335 if (s[offset] == '\\' and offset + 1 < len(s)
336 336 and s[offset + 1] == '"'):
337 337 offset += 1
338 338 parts[-1] += '"'
339 339 else:
340 340 parts[-1] += s[offset]
341 341 offset += 1
342 342
343 343 if offset >= len(s):
344 344 real_parts = _configlist(parts[-1])
345 345 if not real_parts:
346 346 parts[-1] = '"'
347 347 else:
348 348 real_parts[0] = '"' + real_parts[0]
349 349 parts = parts[:-1]
350 350 parts.extend(real_parts)
351 351 return None, parts, offset
352 352
353 353 offset += 1
354 354 while offset < len(s) and s[offset] in [' ', ',']:
355 355 offset += 1
356 356
357 357 if offset < len(s):
358 358 if offset + 1 == len(s) and s[offset] == '"':
359 359 parts[-1] += '"'
360 360 offset += 1
361 361 else:
362 362 parts.append('')
363 363 else:
364 364 return None, parts, offset
365 365
366 366 return _parse_plain, parts, offset
367 367
368 368 def _configlist(s):
369 369 s = s.rstrip(' ,')
370 370 if not s:
371 371 return []
372 372 parser, parts, offset = _parse_plain, [''], 0
373 373 while parser:
374 374 parser, parts, offset = parser(parts, s, offset)
375 375 return parts
376 376
377 377 result = self.config(section, name, untrusted=untrusted)
378 378 if result is None:
379 379 result = default or []
380 380 if isinstance(result, basestring):
381 381 result = _configlist(result.lstrip(' ,\n'))
382 382 if result is None:
383 383 result = default or []
384 384 return result
385 385
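For reference, a few more inputs and the lists the configlist() parser above produces for them, written out as plain pairs rather than as a doctest; the expected values follow the quoting rules of _parse_plain/_parse_quote:

    examples = [
        ('this,is "a small" ,test',  ['this', 'is', 'a small', 'test']),
        ('one two,three',            ['one', 'two', 'three']),
        ('"with, comma" plain',      ['with, comma', 'plain']),
        ('escaped \\" quote',        ['escaped', '"', 'quote']),
    ]
    for raw, expected in examples:
        print('%-30r -> %r' % (raw, expected))
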
386 386 def has_section(self, section, untrusted=False):
387 387 '''tell whether section exists in config.'''
388 388 return section in self._data(untrusted)
389 389
390 390 def configitems(self, section, untrusted=False):
391 391 items = self._data(untrusted).items(section)
392 392 if self.debugflag and not untrusted and self._reportuntrusted:
393 393 for k, v in self._ucfg.items(section):
394 394 if self._tcfg.get(section, k) != v:
395 395 self.debug("ignoring untrusted configuration option "
396 396 "%s.%s = %s\n" % (section, k, v))
397 397 return items
398 398
399 399 def walkconfig(self, untrusted=False):
400 400 cfg = self._data(untrusted)
401 401 for section in cfg.sections():
402 402 for name, value in self.configitems(section, untrusted):
403 403 yield section, name, value
404 404
405 405 def plain(self, feature=None):
406 406 '''is plain mode active?
407 407
408 408 Plain mode means that all configuration variables which affect
409 409 the behavior and output of Mercurial should be
410 410 ignored. Additionally, the output should be stable,
411 411 reproducible and suitable for use in scripts or applications.
412 412
413 413 The only way to trigger plain mode is by setting either the
414 414 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
415 415
416 416 The return value can either be
417 417 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
418 418 - True otherwise
419 419 '''
420 420 if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
421 421 return False
422 422 exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
423 423 if feature and exceptions:
424 424 return feature not in exceptions
425 425 return True
426 426
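A sketch of the HGPLAIN/HGPLAINEXCEPT decision implemented by plain() above, with the environment passed in explicitly instead of read from os.environ:

    def plain(environ, feature=None):
        if 'HGPLAIN' not in environ and 'HGPLAINEXCEPT' not in environ:
            return False
        exceptions = environ.get('HGPLAINEXCEPT', '').strip().split(',')
        if feature and exceptions:
            return feature not in exceptions
        return True

    print(plain({}))                                           # False: neither variable set
    print(plain({'HGPLAIN': '1'}))                             # True: plain mode everywhere
    print(plain({'HGPLAINEXCEPT': 'alias'}, feature='alias'))  # False: feature is excepted
    print(plain({'HGPLAINEXCEPT': 'alias'}, feature='i18n'))   # True: not in the exception list
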
427 427 def username(self):
428 428 """Return default username to be used in commits.
429 429
430 430         Searched for in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
431 431         searching stops at the first one that is set.
432 432 If not found and ui.askusername is True, ask the user, else use
433 433 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
434 434 """
435 435 user = os.environ.get("HGUSER")
436 436 if user is None:
437 437 user = self.config("ui", "username")
438 438 if user is not None:
439 439 user = os.path.expandvars(user)
440 440 if user is None:
441 441 user = os.environ.get("EMAIL")
442 442 if user is None and self.configbool("ui", "askusername"):
443 443 user = self.prompt(_("enter a commit username:"), default=None)
444 444 if user is None and not self.interactive():
445 445 try:
446 446 user = '%s@%s' % (util.getuser(), socket.getfqdn())
447 447 self.warn(_("no username found, using '%s' instead\n") % user)
448 448 except KeyError:
449 449 pass
450 450 if not user:
451 451 raise util.Abort(_('no username supplied (see "hg help config")'))
452 452 if "\n" in user:
453 453 raise util.Abort(_("username %s contains a newline\n") % repr(user))
454 454 return user
455 455
456 456 def shortuser(self, user):
457 457 """Return a short representation of a user name or email address."""
458 458 if not self.verbose:
459 459 user = util.shortuser(user)
460 460 return user
461 461
462 462 def expandpath(self, loc, default=None):
463 463 """Return repository location relative to cwd or from [paths]"""
464 464 if util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
465 465 return loc
466 466
467 467 path = self.config('paths', loc)
468 468 if not path and default is not None:
469 469 path = self.config('paths', default)
470 470 return path or loc
471 471
472 472 def pushbuffer(self):
473 473 self._buffers.append([])
474 474
475 475 def popbuffer(self, labeled=False):
476 476 '''pop the last buffer and return the buffered output
477 477
478 478 If labeled is True, any labels associated with buffered
479 479 output will be handled. By default, this has no effect
480 480 on the output returned, but extensions and GUI tools may
481 481 handle this argument and returned styled output. If output
482 482 is being buffered so it can be captured and parsed or
483 483 processed, labeled should not be set to True.
484 484 '''
485 485 return "".join(self._buffers.pop())
486 486
487 487 def write(self, *args, **opts):
488 488 '''write args to output
489 489
490 490 By default, this method simply writes to the buffer or stdout,
491 491 but extensions or GUI tools may override this method,
492 492 write_err(), popbuffer(), and label() to style output from
493 493 various parts of hg.
494 494
495 495 An optional keyword argument, "label", can be passed in.
496 496 This should be a string containing label names separated by
497 497 space. Label names take the form of "topic.type". For example,
498 498 ui.debug() issues a label of "ui.debug".
499 499
500 500 When labeling output for a specific command, a label of
501 501 "cmdname.type" is recommended. For example, status issues
502 502 a label of "status.modified" for modified files.
503 503 '''
504 504 if self._buffers:
505 505 self._buffers[-1].extend([str(a) for a in args])
506 506 else:
507 507 for a in args:
508 508 self.fout.write(str(a))
509 509
510 510 def write_err(self, *args, **opts):
511 511 try:
512 512 if not getattr(self.fout, 'closed', False):
513 513 self.fout.flush()
514 514 for a in args:
515 515 self.ferr.write(str(a))
516 516 # stderr may be buffered under win32 when redirected to files,
517 517 # including stdout.
518 518 if not getattr(self.ferr, 'closed', False):
519 519 self.ferr.flush()
520 520 except IOError, inst:
521 521 if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
522 522 raise
523 523
524 524 def flush(self):
525 525 try: self.fout.flush()
526 526 except (IOError, ValueError): pass
527 527 try: self.ferr.flush()
528 528 except (IOError, ValueError): pass
529 529
530 530 def _isatty(self, fh):
531 531 if self.configbool('ui', 'nontty', False):
532 532 return False
533 533 return util.isatty(fh)
534 534
535 535 def interactive(self):
536 536 '''is interactive input allowed?
537 537
538 538 An interactive session is a session where input can be reasonably read
539 539 from `sys.stdin'. If this function returns false, any attempt to read
540 540 from stdin should fail with an error, unless a sensible default has been
541 541 specified.
542 542
543 543 Interactiveness is triggered by the value of the `ui.interactive'
544 544 configuration variable or - if it is unset - when `sys.stdin' points
545 545 to a terminal device.
546 546
547 547 This function refers to input only; for output, see `ui.formatted()'.
548 548 '''
549 549 i = self.configbool("ui", "interactive", None)
550 550 if i is None:
551 551 # some environments replace stdin without implementing isatty
552 552 # usually those are non-interactive
553 553 return self._isatty(self.fin)
554 554
555 555 return i
556 556
557 557 def termwidth(self):
558 558 '''how wide is the terminal in columns?
559 559 '''
560 560 if 'COLUMNS' in os.environ:
561 561 try:
562 562 return int(os.environ['COLUMNS'])
563 563 except ValueError:
564 564 pass
565 565 return util.termwidth()
566 566
567 567 def formatted(self):
568 568 '''should formatted output be used?
569 569
570 570         It is often desirable to format the output to suit the output medium.
571 571         Examples of this are truncating long lines or colorizing messages.
572 572         However, this is often not desirable when piping output into other
573 573 utilities, e.g. `grep'.
574 574
575 575 Formatted output is triggered by the value of the `ui.formatted'
576 576 configuration variable or - if it is unset - when `sys.stdout' points
577 577 to a terminal device. Please note that `ui.formatted' should be
578 578 considered an implementation detail; it is not intended for use outside
579 579 Mercurial or its extensions.
580 580
581 581 This function refers to output only; for input, see `ui.interactive()'.
582 582 This function always returns false when in plain mode, see `ui.plain()'.
583 583 '''
584 584 if self.plain():
585 585 return False
586 586
587 587 i = self.configbool("ui", "formatted", None)
588 588 if i is None:
589 589 # some environments replace stdout without implementing isatty
590 590 # usually those are non-interactive
591 591 return self._isatty(self.fout)
592 592
593 593 return i
594 594
595 595 def _readline(self, prompt=''):
596 596 if self._isatty(self.fin):
597 597 try:
598 598 # magically add command line editing support, where
599 599 # available
600 600 import readline
601 601 # force demandimport to really load the module
602 602 readline.read_history_file
603 603 # windows sometimes raises something other than ImportError
604 604 except Exception:
605 605 pass
606 606
607 607 # call write() so output goes through subclassed implementation
608 608 # e.g. color extension on Windows
609 609 self.write(prompt)
610 610
611 611 # instead of trying to emulate raw_input, swap (self.fin,
612 612 # self.fout) with (sys.stdin, sys.stdout)
613 613 oldin = sys.stdin
614 614 oldout = sys.stdout
615 615 sys.stdin = self.fin
616 616 sys.stdout = self.fout
617 617 line = raw_input(' ')
618 618 sys.stdin = oldin
619 619 sys.stdout = oldout
620 620
621 621 # When stdin is in binary mode on Windows, it can cause
622 622 # raw_input() to emit an extra trailing carriage return
623 623 if os.linesep == '\r\n' and line and line[-1] == '\r':
624 624 line = line[:-1]
625 625 return line
626 626
627 627 def prompt(self, msg, default="y"):
628 628 """Prompt user with msg, read response.
629 629 If ui is not interactive, the default is returned.
630 630 """
631 631 if not self.interactive():
632 632 self.write(msg, ' ', default, "\n")
633 633 return default
634 634 try:
635 635 r = self._readline(self.label(msg, 'ui.prompt'))
636 636 if not r:
637 637 return default
638 638 return r
639 639 except EOFError:
640 640 raise util.Abort(_('response expected'))
641 641
642 def promptchoice(self, msg, choices, default=0):
643 """Prompt user with msg, read response, and ensure it matches
644 one of the provided choices. The index of the choice is returned.
645 choices is a sequence of acceptable responses with the format:
646 ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
647 If ui is not interactive, the default is returned.
642 def promptchoice(self, prompt, default=0):
643 """Prompt user with a message, read response, and ensure it matches
644 one of the provided choices. The prompt is formatted as follows:
645
646 "would you like fries with that (Yn)? $$ &Yes $$ &No"
647
648 The index of the choice is returned. Responses are case
649 insensitive. If ui is not interactive, the default is
650 returned.
648 651 """
652
653 parts = prompt.split('$$')
654 msg = parts[0].rstrip(' ')
655 choices = [p.strip(' ') for p in parts[1:]]
649 656 resps = [s[s.index('&') + 1].lower() for s in choices]
650 657 while True:
651 658 r = self.prompt(msg, resps[default])
652 659 if r.lower() in resps:
653 660 return resps.index(r.lower())
654 661 self.write(_("unrecognized response\n"))
655 662
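This is the point of the changeset: promptchoice() now takes the prompt text and the choices as one '$$'-separated string instead of a separate choices sequence. The splitting below mirrors the new implementation, and the commented call shows how a caller would use it (illustrative; a real call needs an interactive ui instance):

    prompt = "would you like fries with that (Yn)? $$ &Yes $$ &No"

    parts = prompt.split('$$')
    msg = parts[0].rstrip(' ')                   # "would you like fries with that (Yn)?"
    choices = [p.strip(' ') for p in parts[1:]]  # ['&Yes', '&No']
    resps = [s[s.index('&') + 1].lower() for s in choices]
    print(resps)                                 # ['y', 'n'] -- the accepted responses

    # Typical caller, assuming an existing ui instance:
    #   if ui.promptchoice(_("keep changes (Yn)? $$ &Yes $$ &No"), default=0) == 1:
    #       ...  # user answered "No"
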
656 663 def getpass(self, prompt=None, default=None):
657 664 if not self.interactive():
658 665 return default
659 666 try:
660 667 self.write(self.label(prompt or _('password: '), 'ui.prompt'))
661 668 return getpass.getpass('')
662 669 except EOFError:
663 670 raise util.Abort(_('response expected'))
664 671 def status(self, *msg, **opts):
665 672 '''write status message to output (if ui.quiet is False)
666 673
667 674 This adds an output label of "ui.status".
668 675 '''
669 676 if not self.quiet:
670 677 opts['label'] = opts.get('label', '') + ' ui.status'
671 678 self.write(*msg, **opts)
672 679 def warn(self, *msg, **opts):
673 680 '''write warning message to output (stderr)
674 681
675 682 This adds an output label of "ui.warning".
676 683 '''
677 684 opts['label'] = opts.get('label', '') + ' ui.warning'
678 685 self.write_err(*msg, **opts)
679 686 def note(self, *msg, **opts):
680 687 '''write note to output (if ui.verbose is True)
681 688
682 689 This adds an output label of "ui.note".
683 690 '''
684 691 if self.verbose:
685 692 opts['label'] = opts.get('label', '') + ' ui.note'
686 693 self.write(*msg, **opts)
687 694 def debug(self, *msg, **opts):
688 695 '''write debug message to output (if ui.debugflag is True)
689 696
690 697 This adds an output label of "ui.debug".
691 698 '''
692 699 if self.debugflag:
693 700 opts['label'] = opts.get('label', '') + ' ui.debug'
694 701 self.write(*msg, **opts)
695 702 def edit(self, text, user):
696 703 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
697 704 text=True)
698 705 try:
699 706 f = os.fdopen(fd, "w")
700 707 f.write(text)
701 708 f.close()
702 709
703 710 editor = self.geteditor()
704 711
705 712 util.system("%s \"%s\"" % (editor, name),
706 713 environ={'HGUSER': user},
707 714 onerr=util.Abort, errprefix=_("edit failed"),
708 715 out=self.fout)
709 716
710 717 f = open(name)
711 718 t = f.read()
712 719 f.close()
713 720 finally:
714 721 os.unlink(name)
715 722
716 723 return t
717 724
718 725 def traceback(self, exc=None, force=False):
719 726 '''print exception traceback if traceback printing enabled or forced.
720 727 only to call in exception handler. returns true if traceback
721 728 printed.'''
722 729 if self.tracebackflag or force:
723 730 if exc is None:
724 731 exc = sys.exc_info()
725 732 cause = getattr(exc[1], 'cause', None)
726 733
727 734 if cause is not None:
728 735 causetb = traceback.format_tb(cause[2])
729 736 exctb = traceback.format_tb(exc[2])
730 737 exconly = traceback.format_exception_only(cause[0], cause[1])
731 738
732 739 # exclude frame where 'exc' was chained and rethrown from exctb
733 740 self.write_err('Traceback (most recent call last):\n',
734 741 ''.join(exctb[:-1]),
735 742 ''.join(causetb),
736 743 ''.join(exconly))
737 744 else:
738 745 traceback.print_exception(exc[0], exc[1], exc[2],
739 746 file=self.ferr)
740 747 return self.tracebackflag or force
741 748
742 749 def geteditor(self):
743 750 '''return editor to use'''
744 751 if sys.platform == 'plan9':
745 752 # vi is the MIPS instruction simulator on Plan 9. We
746 753 # instead default to E to plumb commit messages to
747 754 # avoid confusion.
748 755 editor = 'E'
749 756 else:
750 757 editor = 'vi'
751 758 return (os.environ.get("HGEDITOR") or
752 759 self.config("ui", "editor") or
753 760 os.environ.get("VISUAL") or
754 761 os.environ.get("EDITOR", editor))
755 762
756 763 def progress(self, topic, pos, item="", unit="", total=None):
757 764 '''show a progress message
758 765
759 766 With stock hg, this is simply a debug message that is hidden
760 767 by default, but with extensions or GUI tools it may be
761 768 visible. 'topic' is the current operation, 'item' is a
762 769 non-numeric marker of the current position (i.e. the currently
763 770 in-process file), 'pos' is the current numeric position (i.e.
764 771 revision, bytes, etc.), unit is a corresponding unit label,
765 772 and total is the highest expected pos.
766 773
767 774 Multiple nested topics may be active at a time.
768 775
769 776 All topics should be marked closed by setting pos to None at
770 777 termination.
771 778 '''
772 779
773 780 if pos is None or not self.debugflag:
774 781 return
775 782
776 783 if unit:
777 784 unit = ' ' + unit
778 785 if item:
779 786 item = ' ' + item
780 787
781 788 if total:
782 789 pct = 100.0 * pos / total
783 790 self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
784 791 % (topic, item, pos, total, unit, pct))
785 792 else:
786 793 self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
787 794
788 795 def log(self, service, *msg, **opts):
789 796 '''hook for logging facility extensions
790 797
791 798 service should be a readily-identifiable subsystem, which will
792 799 allow filtering.
793 800 message should be a newline-terminated string to log.
794 801 '''
795 802 pass
796 803
797 804 def label(self, msg, label):
798 805 '''style msg based on supplied label
799 806
800 807 Like ui.write(), this just returns msg unchanged, but extensions
801 808 and GUI tools can override it to allow styling output without
802 809 writing it.
803 810
804 811 ui.write(s, 'label') is equivalent to
805 812 ui.write(ui.label(s, 'label')).
806 813 '''
807 814 return msg