@@ -1,753 +1,751 @@
|
1 | 1 | # context.py - changeset and file context objects for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | 8 | from node import nullid, nullrev, short |
|
9 | 9 | from i18n import _ |
|
10 | 10 | import ancestor, bdiff, revlog, util, os, errno |
|
11 | 11 | |
|
12 | 12 | class changectx(object): |
|
13 | 13 | """A changecontext object makes access to data related to a particular |
|
14 | 14 | changeset convenient.""" |
|
15 | 15 | def __init__(self, repo, changeid=''): |
|
16 | 16 | """changeid is a revision number, node, or tag""" |
|
17 | 17 | if changeid == '': |
|
18 | 18 | changeid = '.' |
|
19 | 19 | self._repo = repo |
|
20 | 20 | self._node = self._repo.lookup(changeid) |
|
21 | 21 | self._rev = self._repo.changelog.rev(self._node) |
|
22 | 22 | |
|
23 | 23 | def __str__(self): |
|
24 | 24 | return short(self.node()) |
|
25 | 25 | |
|
26 | 26 | def __repr__(self): |
|
27 | 27 | return "<changectx %s>" % str(self) |
|
28 | 28 | |
|
29 | 29 | def __hash__(self): |
|
30 | 30 | try: |
|
31 | 31 | return hash(self._rev) |
|
32 | 32 | except AttributeError: |
|
33 | 33 | return id(self) |
|
34 | 34 | |
|
35 | 35 | def __eq__(self, other): |
|
36 | 36 | try: |
|
37 | 37 | return self._rev == other._rev |
|
38 | 38 | except AttributeError: |
|
39 | 39 | return False |
|
40 | 40 | |
|
41 | 41 | def __ne__(self, other): |
|
42 | 42 | return not (self == other) |
|
43 | 43 | |
|
44 | 44 | def __nonzero__(self): |
|
45 | 45 | return self._rev != nullrev |
|
46 | 46 | |
|
47 | 47 | def __getattr__(self, name): |
|
48 | 48 | if name == '_changeset': |
|
49 | 49 | self._changeset = self._repo.changelog.read(self.node()) |
|
50 | 50 | return self._changeset |
|
51 | 51 | elif name == '_manifest': |
|
52 | 52 | self._manifest = self._repo.manifest.read(self._changeset[0]) |
|
53 | 53 | return self._manifest |
|
54 | 54 | elif name == '_manifestdelta': |
|
55 | 55 | md = self._repo.manifest.readdelta(self._changeset[0]) |
|
56 | 56 | self._manifestdelta = md |
|
57 | 57 | return self._manifestdelta |
|
58 | 58 | elif name == '_parents': |
|
59 | 59 | p = self._repo.changelog.parents(self._node) |
|
60 | 60 | if p[1] == nullid: |
|
61 | 61 | p = p[:-1] |
|
62 | 62 | self._parents = [changectx(self._repo, x) for x in p] |
|
63 | 63 | return self._parents |
|
64 | 64 | else: |
|
65 | 65 | raise AttributeError, name |
|
66 | 66 | |
|
67 | 67 | def __contains__(self, key): |
|
68 | 68 | return key in self._manifest |
|
69 | 69 | |
|
70 | 70 | def __getitem__(self, key): |
|
71 | 71 | return self.filectx(key) |
|
72 | 72 | |
|
73 | 73 | def __iter__(self): |
|
74 | 74 | a = self._manifest.keys() |
|
75 | 75 | a.sort() |
|
76 | 76 | for f in a: |
|
77 | 77 | yield f |
|
78 | 78 | |
|
79 | 79 | def changeset(self): return self._changeset |
|
80 | 80 | def manifest(self): return self._manifest |
|
81 | 81 | |
|
82 | 82 | def rev(self): return self._rev |
|
83 | 83 | def node(self): return self._node |
|
84 | 84 | def user(self): return self._changeset[1] |
|
85 | 85 | def date(self): return self._changeset[2] |
|
86 | 86 | def files(self): return self._changeset[3] |
|
87 | 87 | def description(self): return self._changeset[4] |
|
88 | 88 | def branch(self): return self._changeset[5].get("branch") |
|
89 | 89 | def extra(self): return self._changeset[5] |
|
90 | 90 | def tags(self): return self._repo.nodetags(self._node) |
|
91 | 91 | |
|
92 | 92 | def parents(self): |
|
93 | 93 | """return contexts for each parent changeset""" |
|
94 | 94 | return self._parents |
|
95 | 95 | |
|
96 | 96 | def children(self): |
|
97 | 97 | """return contexts for each child changeset""" |
|
98 | 98 | c = self._repo.changelog.children(self._node) |
|
99 | 99 | return [changectx(self._repo, x) for x in c] |
|
100 | 100 | |
|
101 | 101 | def _fileinfo(self, path): |
|
102 | 102 | if '_manifest' in self.__dict__: |
|
103 | 103 | try: |
|
104 | 104 | return self._manifest[path], self._manifest.flags(path) |
|
105 | 105 | except KeyError: |
|
106 | 106 | raise revlog.LookupError(self._node, path, |
|
107 | 107 | _('not found in manifest')) |
|
108 | 108 | if '_manifestdelta' in self.__dict__ or path in self.files(): |
|
109 | 109 | if path in self._manifestdelta: |
|
110 | 110 | return self._manifestdelta[path], self._manifestdelta.flags(path) |
|
111 | 111 | node, flag = self._repo.manifest.find(self._changeset[0], path) |
|
112 | 112 | if not node: |
|
113 | 113 | raise revlog.LookupError(self._node, path, |
|
114 | 114 | _('not found in manifest')) |
|
115 | 115 | |
|
116 | 116 | return node, flag |
|
117 | 117 | |
|
118 | 118 | def filenode(self, path): |
|
119 | 119 | return self._fileinfo(path)[0] |
|
120 | 120 | |
|
121 | 121 | def flags(self, path): |
|
122 | 122 | try: |
|
123 | 123 | return self._fileinfo(path)[1] |
|
124 | 124 | except revlog.LookupError: |
|
125 | 125 | return '' |
|
126 | 126 | |
|
127 | 127 | def filectx(self, path, fileid=None, filelog=None): |
|
128 | 128 | """get a file context from this changeset""" |
|
129 | 129 | if fileid is None: |
|
130 | 130 | fileid = self.filenode(path) |
|
131 | 131 | return filectx(self._repo, path, fileid=fileid, |
|
132 | 132 | changectx=self, filelog=filelog) |
|
133 | 133 | |
|
134 | 134 | def filectxs(self): |
|
135 | 135 | """generate a file context for each file in this changeset's |
|
136 | 136 | manifest""" |
|
137 | 137 | mf = self.manifest() |
|
138 | 138 | m = mf.keys() |
|
139 | 139 | m.sort() |
|
140 | 140 | for f in m: |
|
141 | 141 | yield self.filectx(f, fileid=mf[f]) |
|
142 | 142 | |
|
143 | 143 | def ancestor(self, c2): |
|
144 | 144 | """ |
|
145 | 145 | return the ancestor context of self and c2 |
|
146 | 146 | """ |
|
147 | 147 | n = self._repo.changelog.ancestor(self._node, c2._node) |
|
148 | 148 | return changectx(self._repo, n) |
|
149 | 149 | |
|
150 | 150 | class filectx(object): |
|
151 | 151 | """A filecontext object makes access to data related to a particular |
|
152 | 152 | filerevision convenient.""" |
|
153 | 153 | def __init__(self, repo, path, changeid=None, fileid=None, |
|
154 | 154 | filelog=None, changectx=None): |
|
155 | 155 | """changeid can be a changeset revision, node, or tag. |
|
156 | 156 | fileid can be a file revision or node.""" |
|
157 | 157 | self._repo = repo |
|
158 | 158 | self._path = path |
|
159 | 159 | |
|
160 | 160 | assert (changeid is not None |
|
161 | 161 | or fileid is not None |
|
162 | 162 | or changectx is not None) |
|
163 | 163 | |
|
164 | 164 | if filelog: |
|
165 | 165 | self._filelog = filelog |
|
166 | 166 | |
|
167 | 167 | if changeid is not None: |
|
168 | 168 | self._changeid = changeid |
|
169 | 169 | if changectx is not None: |
|
170 | 170 | self._changectx = changectx |
|
171 | 171 | if fileid is not None: |
|
172 | 172 | self._fileid = fileid |
|
173 | 173 | |
|
174 | 174 | def __getattr__(self, name): |
|
175 | 175 | if name == '_changectx': |
|
176 | 176 | self._changectx = changectx(self._repo, self._changeid) |
|
177 | 177 | return self._changectx |
|
178 | 178 | elif name == '_filelog': |
|
179 | 179 | self._filelog = self._repo.file(self._path) |
|
180 | 180 | return self._filelog |
|
181 | 181 | elif name == '_changeid': |
|
182 | 182 | if '_changectx' in self.__dict__: |
|
183 | 183 | self._changeid = self._changectx.rev() |
|
184 | 184 | else: |
|
185 | 185 | self._changeid = self._filelog.linkrev(self._filenode) |
|
186 | 186 | return self._changeid |
|
187 | 187 | elif name == '_filenode': |
|
188 | 188 | if '_fileid' in self.__dict__: |
|
189 | 189 | self._filenode = self._filelog.lookup(self._fileid) |
|
190 | 190 | else: |
|
191 | 191 | self._filenode = self._changectx.filenode(self._path) |
|
192 | 192 | return self._filenode |
|
193 | 193 | elif name == '_filerev': |
|
194 | 194 | self._filerev = self._filelog.rev(self._filenode) |
|
195 | 195 | return self._filerev |
|
196 | 196 | elif name == '_repopath': |
|
197 | 197 | self._repopath = self._path |
|
198 | 198 | return self._repopath |
|
199 | 199 | else: |
|
200 | 200 | raise AttributeError, name |
|
201 | 201 | |
|
202 | 202 | def __nonzero__(self): |
|
203 | 203 | try: |
|
204 | 204 | n = self._filenode |
|
205 | 205 | return True |
|
206 | 206 | except revlog.LookupError: |
|
207 | 207 | # file is missing |
|
208 | 208 | return False |
|
209 | 209 | |
|
210 | 210 | def __str__(self): |
|
211 | 211 | return "%s@%s" % (self.path(), short(self.node())) |
|
212 | 212 | |
|
213 | 213 | def __repr__(self): |
|
214 | 214 | return "<filectx %s>" % str(self) |
|
215 | 215 | |
|
216 | 216 | def __hash__(self): |
|
217 | 217 | try: |
|
218 | 218 | return hash((self._path, self._fileid)) |
|
219 | 219 | except AttributeError: |
|
220 | 220 | return id(self) |
|
221 | 221 | |
|
222 | 222 | def __eq__(self, other): |
|
223 | 223 | try: |
|
224 | 224 | return (self._path == other._path |
|
225 | 225 | and self._fileid == other._fileid) |
|
226 | 226 | except AttributeError: |
|
227 | 227 | return False |
|
228 | 228 | |
|
229 | 229 | def __ne__(self, other): |
|
230 | 230 | return not (self == other) |
|
231 | 231 | |
|
232 | 232 | def filectx(self, fileid): |
|
233 | 233 | '''opens an arbitrary revision of the file without |
|
234 | 234 | opening a new filelog''' |
|
235 | 235 | return filectx(self._repo, self._path, fileid=fileid, |
|
236 | 236 | filelog=self._filelog) |
|
237 | 237 | |
|
238 | 238 | def filerev(self): return self._filerev |
|
239 | 239 | def filenode(self): return self._filenode |
|
240 | 240 | def flags(self): return self._changectx.flags(self._path) |
|
241 | def isexec(self): return 'x' in self.flags() | |
|
242 | def islink(self): return 'l' in self.flags() | |
|
243 | 241 | def filelog(self): return self._filelog |
|
244 | 242 | |
|
245 | 243 | def rev(self): |
|
246 | 244 | if '_changectx' in self.__dict__: |
|
247 | 245 | return self._changectx.rev() |
|
248 | 246 | if '_changeid' in self.__dict__: |
|
249 | 247 | return self._changectx.rev() |
|
250 | 248 | return self._filelog.linkrev(self._filenode) |
|
251 | 249 | |
|
252 | 250 | def linkrev(self): return self._filelog.linkrev(self._filenode) |
|
253 | 251 | def node(self): return self._changectx.node() |
|
254 | 252 | def user(self): return self._changectx.user() |
|
255 | 253 | def date(self): return self._changectx.date() |
|
256 | 254 | def files(self): return self._changectx.files() |
|
257 | 255 | def description(self): return self._changectx.description() |
|
258 | 256 | def branch(self): return self._changectx.branch() |
|
259 | 257 | def manifest(self): return self._changectx.manifest() |
|
260 | 258 | def changectx(self): return self._changectx |
|
261 | 259 | |
|
262 | 260 | def data(self): return self._filelog.read(self._filenode) |
|
263 | 261 | def path(self): return self._path |
|
264 | 262 | def size(self): return self._filelog.size(self._filerev) |
|
265 | 263 | |
|
266 | 264 | def cmp(self, text): return self._filelog.cmp(self._filenode, text) |
|
267 | 265 | |
|
268 | 266 | def renamed(self): |
|
269 | 267 | """check if file was actually renamed in this changeset revision |
|
270 | 268 | |
|
271 | 269 | If rename logged in file revision, we report copy for changeset only |
|
272 | 270 | if file revisions linkrev points back to the changeset in question |
|
273 | 271 | or both changeset parents contain different file revisions. |
|
274 | 272 | """ |
|
275 | 273 | |
|
276 | 274 | renamed = self._filelog.renamed(self._filenode) |
|
277 | 275 | if not renamed: |
|
278 | 276 | return renamed |
|
279 | 277 | |
|
280 | 278 | if self.rev() == self.linkrev(): |
|
281 | 279 | return renamed |
|
282 | 280 | |
|
283 | 281 | name = self.path() |
|
284 | 282 | fnode = self._filenode |
|
285 | 283 | for p in self._changectx.parents(): |
|
286 | 284 | try: |
|
287 | 285 | if fnode == p.filenode(name): |
|
288 | 286 | return None |
|
289 | 287 | except revlog.LookupError: |
|
290 | 288 | pass |
|
291 | 289 | return renamed |
|
292 | 290 | |
|
293 | 291 | def parents(self): |
|
294 | 292 | p = self._path |
|
295 | 293 | fl = self._filelog |
|
296 | 294 | pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)] |
|
297 | 295 | |
|
298 | 296 | r = self._filelog.renamed(self._filenode) |
|
299 | 297 | if r: |
|
300 | 298 | pl[0] = (r[0], r[1], None) |
|
301 | 299 | |
|
302 | 300 | return [filectx(self._repo, p, fileid=n, filelog=l) |
|
303 | 301 | for p,n,l in pl if n != nullid] |
|
304 | 302 | |
|
305 | 303 | def children(self): |
|
306 | 304 | # hard for renames |
|
307 | 305 | c = self._filelog.children(self._filenode) |
|
308 | 306 | return [filectx(self._repo, self._path, fileid=x, |
|
309 | 307 | filelog=self._filelog) for x in c] |
|
310 | 308 | |
|
311 | 309 | def annotate(self, follow=False, linenumber=None): |
|
312 | 310 | '''returns a list of tuples of (ctx, line) for each line |
|
313 | 311 | in the file, where ctx is the filectx of the node where |
|
314 | 312 | that line was last changed. |
|
315 | 313 | This returns tuples of ((ctx, linenumber), line) for each line, |
|
316 | 314 | if "linenumber" parameter is NOT "None". |
|
317 | 315 | In such tuples, linenumber means one at the first appearance |
|
318 | 316 | in the managed file. |
|
319 | 317 | To reduce annotation cost, |
|
320 | 318 | this returns fixed value(False is used) as linenumber, |
|
321 | 319 | if "linenumber" parameter is "False".''' |
|
322 | 320 | |
|
323 | 321 | def decorate_compat(text, rev): |
|
324 | 322 | return ([rev] * len(text.splitlines()), text) |
|
325 | 323 | |
|
326 | 324 | def without_linenumber(text, rev): |
|
327 | 325 | return ([(rev, False)] * len(text.splitlines()), text) |
|
328 | 326 | |
|
329 | 327 | def with_linenumber(text, rev): |
|
330 | 328 | size = len(text.splitlines()) |
|
331 | 329 | return ([(rev, i) for i in xrange(1, size + 1)], text) |
|
332 | 330 | |
|
333 | 331 | decorate = (((linenumber is None) and decorate_compat) or |
|
334 | 332 | (linenumber and with_linenumber) or |
|
335 | 333 | without_linenumber) |
|
336 | 334 | |
|
337 | 335 | def pair(parent, child): |
|
338 | 336 | for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]): |
|
339 | 337 | child[0][b1:b2] = parent[0][a1:a2] |
|
340 | 338 | return child |
|
341 | 339 | |
|
342 | 340 | getlog = util.cachefunc(lambda x: self._repo.file(x)) |
|
343 | 341 | def getctx(path, fileid): |
|
344 | 342 | log = path == self._path and self._filelog or getlog(path) |
|
345 | 343 | return filectx(self._repo, path, fileid=fileid, filelog=log) |
|
346 | 344 | getctx = util.cachefunc(getctx) |
|
347 | 345 | |
|
348 | 346 | def parents(f): |
|
349 | 347 | # we want to reuse filectx objects as much as possible |
|
350 | 348 | p = f._path |
|
351 | 349 | if f._filerev is None: # working dir |
|
352 | 350 | pl = [(n.path(), n.filerev()) for n in f.parents()] |
|
353 | 351 | else: |
|
354 | 352 | pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)] |
|
355 | 353 | |
|
356 | 354 | if follow: |
|
357 | 355 | r = f.renamed() |
|
358 | 356 | if r: |
|
359 | 357 | pl[0] = (r[0], getlog(r[0]).rev(r[1])) |
|
360 | 358 | |
|
361 | 359 | return [getctx(p, n) for p, n in pl if n != nullrev] |
|
362 | 360 | |
|
363 | 361 | # use linkrev to find the first changeset where self appeared |
|
364 | 362 | if self.rev() != self.linkrev(): |
|
365 | 363 | base = self.filectx(self.filerev()) |
|
366 | 364 | else: |
|
367 | 365 | base = self |
|
368 | 366 | |
|
369 | 367 | # find all ancestors |
|
370 | 368 | needed = {base: 1} |
|
371 | 369 | visit = [base] |
|
372 | 370 | files = [base._path] |
|
373 | 371 | while visit: |
|
374 | 372 | f = visit.pop(0) |
|
375 | 373 | for p in parents(f): |
|
376 | 374 | if p not in needed: |
|
377 | 375 | needed[p] = 1 |
|
378 | 376 | visit.append(p) |
|
379 | 377 | if p._path not in files: |
|
380 | 378 | files.append(p._path) |
|
381 | 379 | else: |
|
382 | 380 | # count how many times we'll use this |
|
383 | 381 | needed[p] += 1 |
|
384 | 382 | |
|
385 | 383 | # sort by revision (per file) which is a topological order |
|
386 | 384 | visit = [] |
|
387 | 385 | for f in files: |
|
388 | 386 | fn = [(n.rev(), n) for n in needed.keys() if n._path == f] |
|
389 | 387 | visit.extend(fn) |
|
390 | 388 | visit.sort() |
|
391 | 389 | hist = {} |
|
392 | 390 | |
|
393 | 391 | for r, f in visit: |
|
394 | 392 | curr = decorate(f.data(), f) |
|
395 | 393 | for p in parents(f): |
|
396 | 394 | if p != nullid: |
|
397 | 395 | curr = pair(hist[p], curr) |
|
398 | 396 | # trim the history of unneeded revs |
|
399 | 397 | needed[p] -= 1 |
|
400 | 398 | if not needed[p]: |
|
401 | 399 | del hist[p] |
|
402 | 400 | hist[f] = curr |
|
403 | 401 | |
|
404 | 402 | return zip(hist[f][0], hist[f][1].splitlines(1)) |
|
405 | 403 | |
|
406 | 404 | def ancestor(self, fc2): |
|
407 | 405 | """ |
|
408 | 406 | find the common ancestor file context, if any, of self, and fc2 |
|
409 | 407 | """ |
|
410 | 408 | |
|
411 | 409 | acache = {} |
|
412 | 410 | |
|
413 | 411 | # prime the ancestor cache for the working directory |
|
414 | 412 | for c in (self, fc2): |
|
415 | 413 | if c._filerev == None: |
|
416 | 414 | pl = [(n.path(), n.filenode()) for n in c.parents()] |
|
417 | 415 | acache[(c._path, None)] = pl |
|
418 | 416 | |
|
419 | 417 | flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog} |
|
420 | 418 | def parents(vertex): |
|
421 | 419 | if vertex in acache: |
|
422 | 420 | return acache[vertex] |
|
423 | 421 | f, n = vertex |
|
424 | 422 | if f not in flcache: |
|
425 | 423 | flcache[f] = self._repo.file(f) |
|
426 | 424 | fl = flcache[f] |
|
427 | 425 | pl = [(f, p) for p in fl.parents(n) if p != nullid] |
|
428 | 426 | re = fl.renamed(n) |
|
429 | 427 | if re: |
|
430 | 428 | pl.append(re) |
|
431 | 429 | acache[vertex] = pl |
|
432 | 430 | return pl |
|
433 | 431 | |
|
434 | 432 | a, b = (self._path, self._filenode), (fc2._path, fc2._filenode) |
|
435 | 433 | v = ancestor.ancestor(a, b, parents) |
|
436 | 434 | if v: |
|
437 | 435 | f, n = v |
|
438 | 436 | return filectx(self._repo, f, fileid=n, filelog=flcache[f]) |
|
439 | 437 | |
|
440 | 438 | return None |
|
441 | 439 | |
|
442 | 440 | class workingctx(changectx): |
|
443 | 441 | """A workingctx object makes access to data related to |
|
444 | 442 | the current working directory convenient. |
|
445 | 443 | parents - a pair of parent nodeids, or None to use the dirstate. |
|
446 | 444 | date - any valid date string or (unixtime, offset), or None. |
|
447 | 445 | user - username string, or None. |
|
448 | 446 | extra - a dictionary of extra values, or None. |
|
449 | 447 | changes - a list of file lists as returned by localrepo.status() |
|
450 | 448 | or None to use the repository status. |
|
451 | 449 | """ |
|
452 | 450 | def __init__(self, repo, parents=None, text="", user=None, date=None, |
|
453 | 451 | extra=None, changes=None): |
|
454 | 452 | self._repo = repo |
|
455 | 453 | self._rev = None |
|
456 | 454 | self._node = None |
|
457 | 455 | self._text = text |
|
458 | 456 | if date: |
|
459 | 457 | self._date = util.parsedate(date) |
|
460 | 458 | else: |
|
461 | 459 | self._date = util.makedate() |
|
462 | 460 | if user: |
|
463 | 461 | self._user = user |
|
464 | 462 | else: |
|
465 | 463 | self._user = self._repo.ui.username() |
|
466 | 464 | if parents: |
|
467 | 465 | p1, p2 = parents |
|
468 | 466 | self._parents = [self._repo.changectx(p) for p in (p1, p2)] |
|
469 | 467 | if changes: |
|
470 | 468 | self._status = list(changes) |
|
471 | 469 | |
|
472 | 470 | self._extra = {} |
|
473 | 471 | if extra: |
|
474 | 472 | self._extra = extra.copy() |
|
475 | 473 | if 'branch' not in self._extra: |
|
476 | 474 | branch = self._repo.dirstate.branch() |
|
477 | 475 | try: |
|
478 | 476 | branch = branch.decode('UTF-8').encode('UTF-8') |
|
479 | 477 | except UnicodeDecodeError: |
|
480 | 478 | raise util.Abort(_('branch name not in UTF-8!')) |
|
481 | 479 | self._extra['branch'] = branch |
|
482 | 480 | if self._extra['branch'] == '': |
|
483 | 481 | self._extra['branch'] = 'default' |
|
484 | 482 | |
|
485 | 483 | def __str__(self): |
|
486 | 484 | return str(self._parents[0]) + "+" |
|
487 | 485 | |
|
488 | 486 | def __nonzero__(self): |
|
489 | 487 | return True |
|
490 | 488 | |
|
491 | 489 | def __getattr__(self, name): |
|
492 | 490 | if name == '_status': |
|
493 | 491 | self._status = self._repo.status() |
|
494 | 492 | return self._status |
|
495 | 493 | if name == '_manifest': |
|
496 | 494 | self._buildmanifest() |
|
497 | 495 | return self._manifest |
|
498 | 496 | elif name == '_parents': |
|
499 | 497 | p = self._repo.dirstate.parents() |
|
500 | 498 | if p[1] == nullid: |
|
501 | 499 | p = p[:-1] |
|
502 | 500 | self._parents = [changectx(self._repo, x) for x in p] |
|
503 | 501 | return self._parents |
|
504 | 502 | else: |
|
505 | 503 | raise AttributeError, name |
|
506 | 504 | |
|
507 | 505 | def _buildmanifest(self): |
|
508 | 506 | """generate a manifest corresponding to the working directory""" |
|
509 | 507 | |
|
510 | 508 | man = self._parents[0].manifest().copy() |
|
511 | 509 | copied = self._repo.dirstate.copies() |
|
512 | 510 | cf = lambda x: man.flags(copied.get(x, x)) |
|
513 | 511 | ff = self._repo.dirstate.flagfunc(cf) |
|
514 | 512 | modified, added, removed, deleted, unknown = self._status[:5] |
|
515 | 513 | for i, l in (("a", added), ("m", modified), ("u", unknown)): |
|
516 | 514 | for f in l: |
|
517 | 515 | man[f] = man.get(copied.get(f, f), nullid) + i |
|
518 | 516 | try: |
|
519 | 517 | man.set(f, ff(f)) |
|
520 | 518 | except OSError: |
|
521 | 519 | pass |
|
522 | 520 | |
|
523 | 521 | for f in deleted + removed: |
|
524 | 522 | if f in man: |
|
525 | 523 | del man[f] |
|
526 | 524 | |
|
527 | 525 | self._manifest = man |
|
528 | 526 | |
|
529 | 527 | def manifest(self): return self._manifest |
|
530 | 528 | |
|
531 | 529 | def user(self): return self._user |
|
532 | 530 | def date(self): return self._date |
|
533 | 531 | def description(self): return self._text |
|
534 | 532 | def files(self): |
|
535 | 533 | f = self.modified() + self.added() + self.removed() |
|
536 | 534 | f.sort() |
|
537 | 535 | return f |
|
538 | 536 | |
|
539 | 537 | def modified(self): return self._status[0] |
|
540 | 538 | def added(self): return self._status[1] |
|
541 | 539 | def removed(self): return self._status[2] |
|
542 | 540 | def deleted(self): return self._status[3] |
|
543 | 541 | def unknown(self): return self._status[4] |
|
544 | 542 | def clean(self): return self._status[5] |
|
545 | 543 | def branch(self): return self._extra['branch'] |
|
546 | 544 | def extra(self): return self._extra |
|
547 | 545 | |
|
548 | 546 | def tags(self): |
|
549 | 547 | t = [] |
|
550 | 548 | [t.extend(p.tags()) for p in self.parents()] |
|
551 | 549 | return t |
|
552 | 550 | |
|
553 | 551 | def children(self): |
|
554 | 552 | return [] |
|
555 | 553 | |
|
556 | 554 | def flags(self, path): |
|
557 | 555 | if '_manifest' in self.__dict__: |
|
558 | 556 | try: |
|
559 | 557 | return self._manifest.flags(path) |
|
560 | 558 | except KeyError: |
|
561 | 559 | return '' |
|
562 | 560 | |
|
563 | 561 | pnode = self._parents[0].changeset()[0] |
|
564 | 562 | orig = self._repo.dirstate.copies().get(path, path) |
|
565 | 563 | node, flag = self._repo.manifest.find(pnode, orig) |
|
566 | 564 | try: |
|
567 | 565 | ff = self._repo.dirstate.flagfunc(lambda x: flag or '') |
|
568 | 566 | return ff(path) |
|
569 | 567 | except OSError: |
|
570 | 568 | pass |
|
571 | 569 | |
|
572 | 570 | if not node or path in self.deleted() or path in self.removed(): |
|
573 | 571 | return '' |
|
574 | 572 | return flag |
|
575 | 573 | |
|
576 | 574 | def filectx(self, path, filelog=None): |
|
577 | 575 | """get a file context from the working directory""" |
|
578 | 576 | return workingfilectx(self._repo, path, workingctx=self, |
|
579 | 577 | filelog=filelog) |
|
580 | 578 | |
|
581 | 579 | def ancestor(self, c2): |
|
582 | 580 | """return the ancestor context of self and c2""" |
|
583 | 581 | return self._parents[0].ancestor(c2) # punt on two parents for now |
|
584 | 582 | |
|
585 | 583 | class workingfilectx(filectx): |
|
586 | 584 | """A workingfilectx object makes access to data related to a particular |
|
587 | 585 | file in the working directory convenient.""" |
|
588 | 586 | def __init__(self, repo, path, filelog=None, workingctx=None): |
|
589 | 587 | """changeid can be a changeset revision, node, or tag. |
|
590 | 588 | fileid can be a file revision or node.""" |
|
591 | 589 | self._repo = repo |
|
592 | 590 | self._path = path |
|
593 | 591 | self._changeid = None |
|
594 | 592 | self._filerev = self._filenode = None |
|
595 | 593 | |
|
596 | 594 | if filelog: |
|
597 | 595 | self._filelog = filelog |
|
598 | 596 | if workingctx: |
|
599 | 597 | self._changectx = workingctx |
|
600 | 598 | |
|
601 | 599 | def __getattr__(self, name): |
|
602 | 600 | if name == '_changectx': |
|
603 | 601 | self._changectx = workingctx(self._repo) |
|
604 | 602 | return self._changectx |
|
605 | 603 | elif name == '_repopath': |
|
606 | 604 | self._repopath = (self._repo.dirstate.copied(self._path) |
|
607 | 605 | or self._path) |
|
608 | 606 | return self._repopath |
|
609 | 607 | elif name == '_filelog': |
|
610 | 608 | self._filelog = self._repo.file(self._repopath) |
|
611 | 609 | return self._filelog |
|
612 | 610 | else: |
|
613 | 611 | raise AttributeError, name |
|
614 | 612 | |
|
615 | 613 | def __nonzero__(self): |
|
616 | 614 | return True |
|
617 | 615 | |
|
618 | 616 | def __str__(self): |
|
619 | 617 | return "%s@%s" % (self.path(), self._changectx) |
|
620 | 618 | |
|
621 | 619 | def filectx(self, fileid): |
|
622 | 620 | '''opens an arbitrary revision of the file without |
|
623 | 621 | opening a new filelog''' |
|
624 | 622 | return filectx(self._repo, self._repopath, fileid=fileid, |
|
625 | 623 | filelog=self._filelog) |
|
626 | 624 | |
|
627 | 625 | def rev(self): |
|
628 | 626 | if '_changectx' in self.__dict__: |
|
629 | 627 | return self._changectx.rev() |
|
630 | 628 | return self._filelog.linkrev(self._filenode) |
|
631 | 629 | |
|
632 | 630 | def data(self): return self._repo.wread(self._path) |
|
633 | 631 | def renamed(self): |
|
634 | 632 | rp = self._repopath |
|
635 | 633 | if rp == self._path: |
|
636 | 634 | return None |
|
637 | 635 | return rp, self._changectx._parents[0]._manifest.get(rp, nullid) |
|
638 | 636 | |
|
639 | 637 | def parents(self): |
|
640 | 638 | '''return parent filectxs, following copies if necessary''' |
|
641 | 639 | p = self._path |
|
642 | 640 | rp = self._repopath |
|
643 | 641 | pcl = self._changectx._parents |
|
644 | 642 | fl = self._filelog |
|
645 | 643 | pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)] |
|
646 | 644 | if len(pcl) > 1: |
|
647 | 645 | if rp != p: |
|
648 | 646 | fl = None |
|
649 | 647 | pl.append((p, pcl[1]._manifest.get(p, nullid), fl)) |
|
650 | 648 | |
|
651 | 649 | return [filectx(self._repo, p, fileid=n, filelog=l) |
|
652 | 650 | for p,n,l in pl if n != nullid] |
|
653 | 651 | |
|
654 | 652 | def children(self): |
|
655 | 653 | return [] |
|
656 | 654 | |
|
657 | 655 | def size(self): return os.stat(self._repo.wjoin(self._path)).st_size |
|
658 | 656 | def date(self): |
|
659 | 657 | t, tz = self._changectx.date() |
|
660 | 658 | try: |
|
661 | 659 | return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz) |
|
662 | 660 | except OSError, err: |
|
663 | 661 | if err.errno != errno.ENOENT: raise |
|
664 | 662 | return (t, tz) |
|
665 | 663 | |
|
666 | 664 | def cmp(self, text): return self._repo.wread(self._path) == text |
|
667 | 665 | |
|
668 | 666 | class memctx(object): |
|
669 | 667 | """A memctx is a subset of changectx supposed to be built on memory |
|
670 | 668 | and passed to commit functions. |
|
671 | 669 | |
|
672 | 670 | NOTE: this interface and the related memfilectx are experimental and |
|
673 | 671 | may change without notice. |
|
674 | 672 | |
|
675 | 673 | parents - a pair of parent nodeids. |
|
676 | 674 | filectxfn - a callable taking (repo, memctx, path) arguments and |
|
677 | 675 | returning a memctx object. |
|
678 | 676 | date - any valid date string or (unixtime, offset), or None. |
|
679 | 677 | user - username string, or None. |
|
680 | 678 | extra - a dictionary of extra values, or None. |
|
681 | 679 | """ |
|
682 | 680 | def __init__(self, repo, parents, text, files, filectxfn, user=None, |
|
683 | 681 | date=None, extra=None): |
|
684 | 682 | self._repo = repo |
|
685 | 683 | self._rev = None |
|
686 | 684 | self._node = None |
|
687 | 685 | self._text = text |
|
688 | 686 | self._date = date and util.parsedate(date) or util.makedate() |
|
689 | 687 | self._user = user or self._repo.ui.username() |
|
690 | 688 | parents = [(p or nullid) for p in parents] |
|
691 | 689 | p1, p2 = parents |
|
692 | 690 | self._parents = [self._repo.changectx(p) for p in (p1, p2)] |
|
693 | 691 | files = list(files) |
|
694 | 692 | files.sort() |
|
695 | 693 | self._status = [files, [], [], [], []] |
|
696 | 694 | self._filectxfn = filectxfn |
|
697 | 695 | |
|
698 | 696 | self._extra = extra and extra.copy() or {} |
|
699 | 697 | if 'branch' not in self._extra: |
|
700 | 698 | self._extra['branch'] = 'default' |
|
701 | 699 | elif self._extra.get('branch') == '': |
|
702 | 700 | self._extra['branch'] = 'default' |
|
703 | 701 | |
|
704 | 702 | def __str__(self): |
|
705 | 703 | return str(self._parents[0]) + "+" |
|
706 | 704 | |
|
707 | 705 | def __nonzero__(self): |
|
708 | 706 | return True |
|
709 | 707 | |
|
710 | 708 | def user(self): return self._user |
|
711 | 709 | def date(self): return self._date |
|
712 | 710 | def description(self): return self._text |
|
713 | 711 | def files(self): return self.modified() |
|
714 | 712 | def modified(self): return self._status[0] |
|
715 | 713 | def added(self): return self._status[1] |
|
716 | 714 | def removed(self): return self._status[2] |
|
717 | 715 | def deleted(self): return self._status[3] |
|
718 | 716 | def unknown(self): return self._status[4] |
|
719 | 717 | def clean(self): return self._status[5] |
|
720 | 718 | def branch(self): return self._extra['branch'] |
|
721 | 719 | def extra(self): return self._extra |
|
722 | 720 | def flags(self, f): return self[f].flags() |
|
723 | 721 | |
|
724 | 722 | def parents(self): |
|
725 | 723 | """return contexts for each parent changeset""" |
|
726 | 724 | return self._parents |
|
727 | 725 | |
|
728 | 726 | def filectx(self, path, filelog=None): |
|
729 | 727 | """get a file context from the working directory""" |
|
730 | 728 | return self._filectxfn(self._repo, self, path) |
|
731 | 729 | |
|
732 | 730 | class memfilectx(object): |
|
733 | 731 | """A memfilectx is a subset of filectx supposed to be built by client |
|
734 | 732 | code and passed to commit functions. |
|
735 | 733 | """ |
|
736 | 734 | def __init__(self, path, data, islink, isexec, copied): |
|
737 | 735 | """copied is the source file path, or None.""" |
|
738 | 736 | self._path = path |
|
739 | 737 | self._data = data |
|
740 | 738 | self._flags = (islink and 'l' or '') + (isexec and 'x' or '') |
|
741 | 739 | self._copied = None |
|
742 | 740 | if copied: |
|
743 | 741 | self._copied = (copied, nullid) |
|
744 | 742 | |
|
745 | 743 | def __nonzero__(self): return True |
|
746 | 744 | def __str__(self): return "%s@%s" % (self.path(), self._changectx) |
|
747 | 745 | def path(self): return self._path |
|
748 | 746 | def data(self): return self._data |
|
749 | 747 | def flags(self): return self._flags |
|
750 | 748 | def isexec(self): return 'x' in self._flags |
|
751 | 749 | def islink(self): return 'l' in self._flags |
|
752 | 750 | def renamed(self): return self._copied |
|
753 | 751 |
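
Note on the context.py hunk above: the only functional change is that filectx loses its isexec() and islink() helpers (old lines 241-242); callers are expected to query flags() directly, while memfilectx keeps its own isexec()/islink() because it stores its flags locally. Below is a minimal, hypothetical sketch of the equivalent checks; FakeFilectx is a stand-in used only for illustration and is not a real Mercurial class.

class FakeFilectx(object):
    """Stand-in for a filectx-like object whose flags() returns '', 'x', 'l' or 'lx'."""
    def __init__(self, flags):
        self._flags = flags
    def flags(self):
        return self._flags

def isexec(fctx):
    # the check that replaces the removed filectx.isexec()
    return 'x' in fctx.flags()

def islink(fctx):
    # the check that replaces the removed filectx.islink()
    return 'l' in fctx.flags()

assert isexec(FakeFilectx('x')) and not islink(FakeFilectx('x'))
assert islink(FakeFilectx('lx')) and isexec(FakeFilectx('lx'))
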
@@ -1,219 +1,219 @@
|
1 | 1 | # filemerge.py - file-level merge handling for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | 8 | from node import nullrev, short |
|
9 | 9 | from i18n import _ |
|
10 | 10 | import util, os, tempfile, simplemerge, re, filecmp |
|
11 | 11 | |
|
12 | 12 | def _toolstr(ui, tool, part, default=""): |
|
13 | 13 | return ui.config("merge-tools", tool + "." + part, default) |
|
14 | 14 | |
|
15 | 15 | def _toolbool(ui, tool, part, default=False): |
|
16 | 16 | return ui.configbool("merge-tools", tool + "." + part, default) |
|
17 | 17 | |
|
18 | 18 | def _findtool(ui, tool): |
|
19 | 19 | if tool in ("internal:fail", "internal:local", "internal:other"): |
|
20 | 20 | return tool |
|
21 | 21 | k = _toolstr(ui, tool, "regkey") |
|
22 | 22 | if k: |
|
23 | 23 | p = util.lookup_reg(k, _toolstr(ui, tool, "regname")) |
|
24 | 24 | if p: |
|
25 | 25 | p = util.find_exe(p + _toolstr(ui, tool, "regappend")) |
|
26 | 26 | if p: |
|
27 | 27 | return p |
|
28 | 28 | return util.find_exe(_toolstr(ui, tool, "executable", tool)) |
|
29 | 29 | |
|
30 | 30 | def _picktool(repo, ui, path, binary, symlink): |
|
31 | 31 | def check(tool, pat, symlink, binary): |
|
32 | 32 | tmsg = tool |
|
33 | 33 | if pat: |
|
34 | 34 | tmsg += " specified for " + pat |
|
35 | 35 | if pat and not _findtool(ui, tool): # skip search if not matching |
|
36 | 36 | ui.warn(_("couldn't find merge tool %s\n") % tmsg) |
|
37 | 37 | elif symlink and not _toolbool(ui, tool, "symlink"): |
|
38 | 38 | ui.warn(_("tool %s can't handle symlinks\n") % tmsg) |
|
39 | 39 | elif binary and not _toolbool(ui, tool, "binary"): |
|
40 | 40 | ui.warn(_("tool %s can't handle binary\n") % tmsg) |
|
41 | 41 | elif not util.gui() and _toolbool(ui, tool, "gui"): |
|
42 | 42 | ui.warn(_("tool %s requires a GUI\n") % tmsg) |
|
43 | 43 | else: |
|
44 | 44 | return True |
|
45 | 45 | return False |
|
46 | 46 | |
|
47 | 47 | # HGMERGE takes precedence |
|
48 | 48 | hgmerge = os.environ.get("HGMERGE") |
|
49 | 49 | if hgmerge: |
|
50 | 50 | return (hgmerge, hgmerge) |
|
51 | 51 | |
|
52 | 52 | # then patterns |
|
53 | 53 | for pat, tool in ui.configitems("merge-patterns"): |
|
54 | 54 | mf = util.matcher(repo.root, "", [pat], [], [])[1] |
|
55 | 55 | if mf(path) and check(tool, pat, symlink, False): |
|
56 | 56 | toolpath = _findtool(ui, tool) |
|
57 | 57 | return (tool, '"' + toolpath + '"') |
|
58 | 58 | |
|
59 | 59 | # then merge tools |
|
60 | 60 | tools = {} |
|
61 | 61 | for k,v in ui.configitems("merge-tools"): |
|
62 | 62 | t = k.split('.')[0] |
|
63 | 63 | if t not in tools: |
|
64 | 64 | tools[t] = int(_toolstr(ui, t, "priority", "0")) |
|
65 | 65 | names = tools.keys() |
|
66 | 66 | tools = [(-p,t) for t,p in tools.items()] |
|
67 | 67 | tools.sort() |
|
68 | 68 | uimerge = ui.config("ui", "merge") |
|
69 | 69 | if uimerge: |
|
70 | 70 | if uimerge not in names: |
|
71 | 71 | return (uimerge, uimerge) |
|
72 | 72 | tools.insert(0, (None, uimerge)) # highest priority |
|
73 | 73 | tools.append((None, "hgmerge")) # the old default, if found |
|
74 | 74 | for p,t in tools: |
|
75 | 75 | toolpath = _findtool(ui, t) |
|
76 | 76 | if toolpath and check(t, None, symlink, binary): |
|
77 | 77 | return (t, '"' + toolpath + '"') |
|
78 | 78 | # internal merge as last resort |
|
79 | 79 | return (not (symlink or binary) and "internal:merge" or None, None) |
|
80 | 80 | |
|
81 | 81 | def _eoltype(data): |
|
82 | 82 | "Guess the EOL type of a file" |
|
83 | 83 | if '\0' in data: # binary |
|
84 | 84 | return None |
|
85 | 85 | if '\r\n' in data: # Windows |
|
86 | 86 | return '\r\n' |
|
87 | 87 | if '\r' in data: # Old Mac |
|
88 | 88 | return '\r' |
|
89 | 89 | if '\n' in data: # UNIX |
|
90 | 90 | return '\n' |
|
91 | 91 | return None # unknown |
|
92 | 92 | |
|
93 | 93 | def _matcheol(file, origfile): |
|
94 | 94 | "Convert EOL markers in a file to match origfile" |
|
95 | 95 | tostyle = _eoltype(open(origfile, "rb").read()) |
|
96 | 96 | if tostyle: |
|
97 | 97 | data = open(file, "rb").read() |
|
98 | 98 | style = _eoltype(data) |
|
99 | 99 | if style: |
|
100 | 100 | newdata = data.replace(style, tostyle) |
|
101 | 101 | if newdata != data: |
|
102 | 102 | open(file, "wb").write(newdata) |
|
103 | 103 | |
|
104 | 104 | def filemerge(repo, mynode, orig, fcd, fco, fca): |
|
105 | 105 | """perform a 3-way merge in the working directory |
|
106 | 106 | |
|
107 | 107 | mynode = parent node before merge |
|
108 | 108 | orig = original local filename before merge |
|
109 | 109 | fco = other file context |
|
110 | 110 | fca = ancestor file context |
|
111 | 111 | fcd = local file context for current/destination file |
|
112 | 112 | """ |
|
113 | 113 | |
|
114 | 114 | def temp(prefix, ctx): |
|
115 | 115 | pre = "%s~%s." % (os.path.basename(ctx.path()), prefix) |
|
116 | 116 | (fd, name) = tempfile.mkstemp(prefix=pre) |
|
117 | 117 | data = repo.wwritedata(ctx.path(), ctx.data()) |
|
118 | 118 | f = os.fdopen(fd, "wb") |
|
119 | 119 | f.write(data) |
|
120 | 120 | f.close() |
|
121 | 121 | return name |
|
122 | 122 | |
|
123 | 123 | def isbin(ctx): |
|
124 | 124 | try: |
|
125 | 125 | return util.binary(ctx.data()) |
|
126 | 126 | except IOError: |
|
127 | 127 | return False |
|
128 | 128 | |
|
129 | 129 | if not fco.cmp(fcd.data()): # files identical? |
|
130 | 130 | return None |
|
131 | 131 | |
|
132 | 132 | ui = repo.ui |
|
133 | 133 | fd = fcd.path() |
|
134 | 134 | binary = isbin(fcd) or isbin(fco) or isbin(fca) |
|
135 | symlink = fcd.islink() or fco.islink() | |
|
135 | symlink = 'l' in fcd.flags() + fco.flags() | |
|
136 | 136 | tool, toolpath = _picktool(repo, ui, fd, binary, symlink) |
|
137 | 137 | ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") % |
|
138 | 138 | (tool, fd, binary, symlink)) |
|
139 | 139 | |
|
140 | 140 | if not tool: |
|
141 | 141 | tool = "internal:local" |
|
142 | 142 | if ui.prompt(_(" no tool found to merge %s\n" |
|
143 | 143 | "keep (l)ocal or take (o)ther?") % fd, |
|
144 | 144 | _("[lo]"), _("l")) != _("l"): |
|
145 | 145 | tool = "internal:other" |
|
146 | 146 | if tool == "internal:local": |
|
147 | 147 | return 0 |
|
148 | 148 | if tool == "internal:other": |
|
149 | 149 | repo.wwrite(fd, fco.data(), fco.flags()) |
|
150 | 150 | return 0 |
|
151 | 151 | if tool == "internal:fail": |
|
152 | 152 | return 1 |
|
153 | 153 | |
|
154 | 154 | # do the actual merge |
|
155 | 155 | a = repo.wjoin(fd) |
|
156 | 156 | b = temp("base", fca) |
|
157 | 157 | c = temp("other", fco) |
|
158 | 158 | out = "" |
|
159 | 159 | back = a + ".orig" |
|
160 | 160 | util.copyfile(a, back) |
|
161 | 161 | |
|
162 | 162 | if orig != fco.path(): |
|
163 | 163 | repo.ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd)) |
|
164 | 164 | else: |
|
165 | 165 | repo.ui.status(_("merging %s\n") % fd) |
|
166 | 166 | |
|
167 | 167 | repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca)) |
|
168 | 168 | |
|
169 | 169 | # do we attempt to simplemerge first? |
|
170 | 170 | if _toolbool(ui, tool, "premerge", not (binary or symlink)): |
|
171 | 171 | r = simplemerge.simplemerge(a, b, c, quiet=True) |
|
172 | 172 | if not r: |
|
173 | 173 | ui.debug(_(" premerge successful\n")) |
|
174 | 174 | os.unlink(back) |
|
175 | 175 | os.unlink(b) |
|
176 | 176 | os.unlink(c) |
|
177 | 177 | return 0 |
|
178 | 178 | util.copyfile(back, a) # restore from backup and try again |
|
179 | 179 | |
|
180 | 180 | env = dict(HG_FILE=fd, |
|
181 | 181 | HG_MY_NODE=short(mynode), |
|
182 | 182 | HG_OTHER_NODE=str(fco.changectx()), |
|
183 | HG_MY_ISLINK=fcd.islink(), | |
|
184 | HG_OTHER_ISLINK=fco.islink(), | |
|
185 | HG_BASE_ISLINK=fca.islink()) | |
|
183 | HG_MY_ISLINK='l' in fcd.flags(), | |
|
184 | HG_OTHER_ISLINK='l' in fco.flags(), | |
|
185 | HG_BASE_ISLINK='l' in fca.flags()) | |
|
186 | 186 | |
|
187 | 187 | if tool == "internal:merge": |
|
188 | 188 | r = simplemerge.simplemerge(a, b, c, label=['local', 'other']) |
|
189 | 189 | else: |
|
190 | 190 | args = _toolstr(ui, tool, "args", '$local $base $other') |
|
191 | 191 | if "$output" in args: |
|
192 | 192 | out, a = a, back # read input from backup, write to original |
|
193 | 193 | replace = dict(local=a, base=b, other=c, output=out) |
|
194 | 194 | args = re.sub("\$(local|base|other|output)", |
|
195 | 195 | lambda x: '"%s"' % replace[x.group()[1:]], args) |
|
196 | 196 | r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env) |
|
197 | 197 | |
|
198 | 198 | if not r and _toolbool(ui, tool, "checkconflicts"): |
|
199 | 199 | if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()): |
|
200 | 200 | r = 1 |
|
201 | 201 | |
|
202 | 202 | if not r and _toolbool(ui, tool, "checkchanged"): |
|
203 | 203 | if filecmp.cmp(repo.wjoin(fd), back): |
|
204 | 204 | if ui.prompt(_(" output file %s appears unchanged\n" |
|
205 | 205 | "was merge successful (yn)?") % fd, |
|
206 | 206 | _("[yn]"), _("n")) != _("y"): |
|
207 | 207 | r = 1 |
|
208 | 208 | |
|
209 | 209 | if _toolbool(ui, tool, "fixeol"): |
|
210 | 210 | _matcheol(repo.wjoin(fd), back) |
|
211 | 211 | |
|
212 | 212 | if r: |
|
213 | 213 | repo.ui.warn(_("merging %s failed!\n") % fd) |
|
214 | 214 | else: |
|
215 | 215 | os.unlink(back) |
|
216 | 216 | |
|
217 | 217 | os.unlink(b) |
|
218 | 218 | os.unlink(c) |
|
219 | 219 | return r |
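
For context on the filemerge.py hunk: the tool invocation near lines 190-196 expands the $local, $base, $other and $output placeholders in the configured args string before shelling out to the external merge tool. The following is a small standalone sketch of that substitution, assuming nothing beyond the standard library; the function name and arguments are illustrative, not part of the patch.

import re

def expand_tool_args(args, local, base, other, output=''):
    # mirror of the re.sub call in filemerge(): replace $local/$base/$other/$output
    # with the corresponding file name, quoted for the shell
    replace = dict(local=local, base=base, other=other, output=output)
    return re.sub(r"\$(local|base|other|output)",
                  lambda m: '"%s"' % replace[m.group(1)], args)

print(expand_tool_args('$local $base $other', 'a.txt', 'a.txt~base', 'a.txt~other'))
# prints: "a.txt" "a.txt~base" "a.txt~other"
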