diff: use copy smarts from copies.py
Matt Mackall
r6275:fda369b5 default
@@ -1,131 +1,134 b''
1 # ancestor.py - generic DAG ancestor algorithm for mercurial
1 # ancestor.py - generic DAG ancestor algorithm for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import heapq
8 import heapq
9
9
10 def ancestor(a, b, pfunc):
10 def ancestor(a, b, pfunc):
11 """
11 """
12 return the least common ancestor of nodes a and b or None if there
12 return the least common ancestor of nodes a and b or None if there
13 is no such ancestor.
13 is no such ancestor.
14
14
15 pfunc must return a list of parent vertices
15 pfunc must return a list of parent vertices
16 """
16 """
17
17
18 if a == b:
18 if a == b:
19 return a
19 return a
20
20
21 # find depth from root of all ancestors
21 # find depth from root of all ancestors
22 visit = [a, b]
22 visit = [a, b]
23 depth = {}
23 depth = {}
24 while visit:
24 while visit:
25 vertex = visit[-1]
25 vertex = visit[-1]
26 pl = pfunc(vertex)
26 pl = pfunc(vertex)
27 if not pl:
27 if not pl:
28 depth[vertex] = 0
28 depth[vertex] = 0
29 visit.pop()
29 visit.pop()
30 else:
30 else:
31 for p in pl:
31 for p in pl:
32 if p == a or p == b: # did we find a or b as a parent?
32 if p == a or p == b: # did we find a or b as a parent?
33 return p # we're done
33 return p # we're done
34 if p not in depth:
34 if p not in depth:
35 visit.append(p)
35 visit.append(p)
36 if visit[-1] == vertex:
36 if visit[-1] == vertex:
37 depth[vertex] = min([depth[p] for p in pl]) - 1
37 depth[vertex] = min([depth[p] for p in pl]) - 1
38 visit.pop()
38 visit.pop()
39
39
40 # traverse ancestors in order of decreasing distance from root
40 # traverse ancestors in order of decreasing distance from root
41 def ancestors(vertex):
41 def ancestors(vertex):
42 h = [(depth[vertex], vertex)]
42 h = [(depth[vertex], vertex)]
43 seen = {}
43 seen = {}
44 while h:
44 while h:
45 d, n = heapq.heappop(h)
45 d, n = heapq.heappop(h)
46 if n not in seen:
46 if n not in seen:
47 seen[n] = 1
47 seen[n] = 1
48 yield (d, n)
48 yield (d, n)
49 for p in pfunc(n):
49 for p in pfunc(n):
50 heapq.heappush(h, (depth[p], p))
50 heapq.heappush(h, (depth[p], p))
51
51
52 def generations(vertex):
52 def generations(vertex):
53 sg, s = None, {}
53 sg, s = None, {}
54 for g, v in ancestors(vertex):
54 for g, v in ancestors(vertex):
55 if g != sg:
55 if g != sg:
56 if sg:
56 if sg:
57 yield sg, s
57 yield sg, s
58 sg, s = g, {v:1}
58 sg, s = g, {v:1}
59 else:
59 else:
60 s[v] = 1
60 s[v] = 1
61 yield sg, s
61 yield sg, s
62
62
63 x = generations(a)
63 x = generations(a)
64 y = generations(b)
64 y = generations(b)
65 gx = x.next()
65 gx = x.next()
66 gy = y.next()
66 gy = y.next()
67
67
68 # increment each ancestor list until it is closer to root than
68 # increment each ancestor list until it is closer to root than
69 # the other, or they match
69 # the other, or they match
70 try:
70 try:
71 while 1:
71 while 1:
72 if gx[0] == gy[0]:
72 if gx[0] == gy[0]:
73 for v in gx[1]:
73 for v in gx[1]:
74 if v in gy[1]:
74 if v in gy[1]:
75 return v
75 return v
76 gy = y.next()
76 gy = y.next()
77 gx = x.next()
77 gx = x.next()
78 elif gx[0] > gy[0]:
78 elif gx[0] > gy[0]:
79 gy = y.next()
79 gy = y.next()
80 else:
80 else:
81 gx = x.next()
81 gx = x.next()
82 except StopIteration:
82 except StopIteration:
83 return None
83 return None
84
84
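To make the depth bookkeeping above concrete, here is a small standalone sketch (illustrative only, not part of this changeset) of what ancestor(a, b, pfunc) computes on a toy DAG; the parent map and the brute-force helpers are invented for the example.

# Toy DAG: 1 is the root, 4 and 5 are the vertices we query.
#
#        1
#       / \
#      2   3
#       \ / \
#        4   5
parents = {1: [], 2: [1], 3: [1], 4: [2, 3], 5: [3]}
pfunc = parents.get          # same contract as above: vertex -> parent list

def ancestors_of(v):
    "all ancestors of v, including v itself (brute force)"
    seen, stack = set(), [v]
    while stack:
        x = stack.pop()
        if x not in seen:
            seen.add(x)
            stack.extend(pfunc(x))
    return seen

depths = {}
def depth(v):
    "longest distance from a root, mirroring the depth map built above"
    if v not in depths:
        ps = pfunc(v)
        depths[v] = 0 if not ps else max(depth(p) for p in ps) + 1
    return depths[v]

# The least common ancestor is the common ancestor farthest from the root;
# ancestor(4, 5, pfunc) above returns the same vertex.
print(max(ancestors_of(4) & ancestors_of(5), key=depth))   # 3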
85 def symmetricdifference(a, b, pfunc):
85 def symmetricdifference(a, b, pfunc):
86 """symmetric difference of the sets of ancestors of a and b
86 """symmetric difference of the sets of ancestors of a and b
87
87
88 I.e. revisions that are ancestors of a or b, but not both.
88 I.e. revisions that are ancestors of a or b, but not both.
89 """
89 """
90 # basic idea:
90 # basic idea:
91 # - mark a and b with different colors
91 # - mark a and b with different colors
92 # - walk the graph in topological order with the help of a heap;
92 # - walk the graph in topological order with the help of a heap;
93 # for each revision r:
93 # for each revision r:
94 # - if r has only one color, we want to return it
94 # - if r has only one color, we want to return it
95 # - add colors[r] to its parents
95 # - add colors[r] to its parents
96 #
96 #
97 # We keep track of the number of revisions in the heap that
97 # We keep track of the number of revisions in the heap that
98 # we may be interested in. We stop walking the graph as soon
98 # we may be interested in. We stop walking the graph as soon
99 # as this number reaches 0.
99 # as this number reaches 0.
100 if a == b:
101 return [a]
102
100 WHITE = 1
103 WHITE = 1
101 BLACK = 2
104 BLACK = 2
102 ALLCOLORS = WHITE | BLACK
105 ALLCOLORS = WHITE | BLACK
103 colors = {a: WHITE, b: BLACK}
106 colors = {a: WHITE, b: BLACK}
104
107
105 visit = [-a, -b]
108 visit = [-a, -b]
106 heapq.heapify(visit)
109 heapq.heapify(visit)
107 n_wanted = len(visit)
110 n_wanted = len(visit)
108 ret = []
111 ret = []
109
112
110 while n_wanted:
113 while n_wanted:
111 r = -heapq.heappop(visit)
114 r = -heapq.heappop(visit)
112 wanted = colors[r] != ALLCOLORS
115 wanted = colors[r] != ALLCOLORS
113 n_wanted -= wanted
116 n_wanted -= wanted
114 if wanted:
117 if wanted:
115 ret.append(r)
118 ret.append(r)
116
119
117 for p in pfunc(r):
120 for p in pfunc(r):
118 if p not in colors:
121 if p not in colors:
119 # first time we see p; add it to visit
122 # first time we see p; add it to visit
120 n_wanted += wanted
123 n_wanted += wanted
121 colors[p] = colors[r]
124 colors[p] = colors[r]
122 heapq.heappush(visit, -p)
125 heapq.heappush(visit, -p)
123 elif colors[p] != ALLCOLORS and colors[p] != colors[r]:
126 elif colors[p] != ALLCOLORS and colors[p] != colors[r]:
124 # at first we thought we wanted p, but now
127 # at first we thought we wanted p, but now
125 # we know we don't really want it
128 # we know we don't really want it
126 n_wanted -= 1
129 n_wanted -= 1
127 colors[p] |= colors[r]
130 colors[p] |= colors[r]
128
131
129 del colors[r]
132 del colors[r]
130
133
131 return ret
134 return ret
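The colouring walk above can be sanity-checked against a brute-force definition. The following standalone sketch (illustrative only; the DAG, pfunc and helper are invented for the example) shows what symmetricdifference(a, b, pfunc) is specified to return, and why copies.py below takes min() of the result as its linkrev limit.

# Toy DAG of revision numbers; pfunc maps a revision to its parent revisions,
# like the parents() helper that copies.py builds from changelog.parentrevs.
#
#        0
#       / \
#      1   2
#      |   |
#      3   4
parents = {0: [], 1: [0], 2: [0], 3: [1], 4: [2]}
pfunc = parents.get

def ancestors_of(rev):
    "all ancestors of rev, including rev itself (brute force)"
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(pfunc(r))
    return seen

# Revisions that are ancestors of 3 or of 4, but not of both:
sym = ancestors_of(3) ^ ancestors_of(4)
print(sorted(sym))        # [1, 2, 3, 4] -- rev 0 is common, so it is excluded
print(min(sym))           # 1 -- copies.py uses this as the "back to rev" limit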
@@ -1,193 +1,193 b''
1 # copies.py - copy detection for Mercurial
1 # copies.py - copy detection for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev
8 from node import nullid, nullrev
9 from i18n import _
9 from i18n import _
10 import util, ancestor
10 import util, ancestor
11
11
12 def _nonoverlap(d1, d2, d3):
12 def _nonoverlap(d1, d2, d3):
13 "Return list of elements in d1 not in d2 or d3"
13 "Return list of elements in d1 not in d2 or d3"
14 l = [d for d in d1 if d not in d3 and d not in d2]
14 l = [d for d in d1 if d not in d3 and d not in d2]
15 l.sort()
15 l.sort()
16 return l
16 return l
17
17
18 def _dirname(f):
18 def _dirname(f):
19 s = f.rfind("/")
19 s = f.rfind("/")
20 if s == -1:
20 if s == -1:
21 return ""
21 return ""
22 return f[:s]
22 return f[:s]
23
23
24 def _dirs(files):
24 def _dirs(files):
25 d = {}
25 d = {}
26 for f in files:
26 for f in files:
27 f = _dirname(f)
27 f = _dirname(f)
28 while f not in d:
28 while f not in d:
29 d[f] = True
29 d[f] = True
30 f = _dirname(f)
30 f = _dirname(f)
31 return d
31 return d
32
32
33 def _findoldnames(fctx, limit):
33 def _findoldnames(fctx, limit):
34 "find files that path was copied from, back to linkrev limit"
34 "find files that path was copied from, back to linkrev limit"
35 old = {}
35 old = {}
36 seen = {}
36 seen = {}
37 orig = fctx.path()
37 orig = fctx.path()
38 visit = [fctx]
38 visit = [fctx]
39 while visit:
39 while visit:
40 fc = visit.pop()
40 fc = visit.pop()
41 s = str(fc)
41 s = str(fc)
42 if s in seen:
42 if s in seen:
43 continue
43 continue
44 seen[s] = 1
44 seen[s] = 1
45 if fc.path() != orig and fc.path() not in old:
45 if fc.path() != orig and fc.path() not in old:
46 old[fc.path()] = 1
46 old[fc.path()] = 1
47 if fc.rev() < limit and fc.rev() is not None:
47 if fc.rev() < limit and fc.rev() is not None:
48 continue
48 continue
49 visit += fc.parents()
49 visit += fc.parents()
50
50
51 old = old.keys()
51 old = old.keys()
52 old.sort()
52 old.sort()
53 return old
53 return old
54
54
55 def copies(repo, c1, c2, ca):
55 def copies(repo, c1, c2, ca):
56 """
56 """
57 Find moves and copies between context c1 and c2
57 Find moves and copies between context c1 and c2
58 """
58 """
59 # avoid silly behavior for update from empty dir
59 # avoid silly behavior for update from empty dir
60 if not c1 or not c2 or not ca:
60 if not c1 or not c2:
61 return {}, {}
61 return {}, {}
62
62
63 rev1, rev2 = c1.rev(), c2.rev()
63 rev1, rev2 = c1.rev(), c2.rev()
64 if rev1 is None: # c1 is a workingctx
64 if rev1 is None: # c1 is a workingctx
65 rev1 = c1.parents()[0].rev()
65 rev1 = c1.parents()[0].rev()
66 if rev2 is None: # c2 is a workingctx
66 if rev2 is None: # c2 is a workingctx
67 rev2 = c2.parents()[0].rev()
67 rev2 = c2.parents()[0].rev()
68 pr = repo.changelog.parentrevs
68 pr = repo.changelog.parentrevs
69 def parents(rev):
69 def parents(rev):
70 return [p for p in pr(rev) if p != nullrev]
70 return [p for p in pr(rev) if p != nullrev]
71 limit = min(ancestor.symmetricdifference(rev1, rev2, parents))
71 limit = min(ancestor.symmetricdifference(rev1, rev2, parents))
72 m1 = c1.manifest()
72 m1 = c1.manifest()
73 m2 = c2.manifest()
73 m2 = c2.manifest()
74 ma = ca.manifest()
74 ma = ca.manifest()
75
75
76 def makectx(f, n):
76 def makectx(f, n):
77 if len(n) != 20: # in a working context?
77 if len(n) != 20: # in a working context?
78 if c1.rev() is None:
78 if c1.rev() is None:
79 return c1.filectx(f)
79 return c1.filectx(f)
80 return c2.filectx(f)
80 return c2.filectx(f)
81 return repo.filectx(f, fileid=n)
81 return repo.filectx(f, fileid=n)
82 ctx = util.cachefunc(makectx)
82 ctx = util.cachefunc(makectx)
83
83
84 copy = {}
84 copy = {}
85 fullcopy = {}
85 fullcopy = {}
86 diverge = {}
86 diverge = {}
87
87
88 def checkcopies(f, m1, m2):
88 def checkcopies(f, m1, m2):
89 '''check possible copies of f from m1 to m2'''
89 '''check possible copies of f from m1 to m2'''
90 c1 = ctx(f, m1[f])
90 c1 = ctx(f, m1[f])
91 for of in _findoldnames(c1, limit):
91 for of in _findoldnames(c1, limit):
92 fullcopy[f] = of # remember for dir rename detection
92 fullcopy[f] = of # remember for dir rename detection
93 if of in m2: # original file not in other manifest?
93 if of in m2: # original file not in other manifest?
94 # if the original file is unchanged on the other branch,
94 # if the original file is unchanged on the other branch,
95 # no merge needed
95 # no merge needed
96 if m2[of] != ma.get(of):
96 if m2[of] != ma.get(of):
97 c2 = ctx(of, m2[of])
97 c2 = ctx(of, m2[of])
98 ca = c1.ancestor(c2)
98 ca = c1.ancestor(c2)
99 # related and named changed on only one side?
99 # related and named changed on only one side?
100 if ca and ca.path() == f or ca.path() == c2.path():
100 if ca and ca.path() == f or ca.path() == c2.path():
101 if c1 != ca or c2 != ca: # merge needed?
101 if c1 != ca or c2 != ca: # merge needed?
102 copy[f] = of
102 copy[f] = of
103 elif of in ma:
103 elif of in ma:
104 diverge.setdefault(of, []).append(f)
104 diverge.setdefault(of, []).append(f)
105
105
106 if not repo.ui.configbool("merge", "followcopies", True):
106 if not repo.ui.configbool("merge", "followcopies", True):
107 return {}, {}
107 return {}, {}
108
108
109 repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)
109 repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)
110
110
111 u1 = _nonoverlap(m1, m2, ma)
111 u1 = _nonoverlap(m1, m2, ma)
112 u2 = _nonoverlap(m2, m1, ma)
112 u2 = _nonoverlap(m2, m1, ma)
113
113
114 if u1:
114 if u1:
115 repo.ui.debug(_(" unmatched files in local:\n %s\n")
115 repo.ui.debug(_(" unmatched files in local:\n %s\n")
116 % "\n ".join(u1))
116 % "\n ".join(u1))
117 if u2:
117 if u2:
118 repo.ui.debug(_(" unmatched files in other:\n %s\n")
118 repo.ui.debug(_(" unmatched files in other:\n %s\n")
119 % "\n ".join(u2))
119 % "\n ".join(u2))
120
120
121 for f in u1:
121 for f in u1:
122 checkcopies(f, m1, m2)
122 checkcopies(f, m1, m2)
123 for f in u2:
123 for f in u2:
124 checkcopies(f, m2, m1)
124 checkcopies(f, m2, m1)
125
125
126 diverge2 = {}
126 diverge2 = {}
127 for of, fl in diverge.items():
127 for of, fl in diverge.items():
128 if len(fl) == 1:
128 if len(fl) == 1:
129 del diverge[of] # not actually divergent
129 del diverge[of] # not actually divergent
130 else:
130 else:
131 diverge2.update(dict.fromkeys(fl)) # reverse map for below
131 diverge2.update(dict.fromkeys(fl)) # reverse map for below
132
132
133 if fullcopy:
133 if fullcopy:
134 repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
134 repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
135 for f in fullcopy:
135 for f in fullcopy:
136 note = ""
136 note = ""
137 if f in copy: note += "*"
137 if f in copy: note += "*"
138 if f in diverge2: note += "!"
138 if f in diverge2: note += "!"
139 repo.ui.debug(_(" %s -> %s %s\n") % (f, fullcopy[f], note))
139 repo.ui.debug(_(" %s -> %s %s\n") % (f, fullcopy[f], note))
140 del diverge2
140 del diverge2
141
141
142 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
142 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
143 return copy, diverge
143 return copy, diverge
144
144
145 repo.ui.debug(_(" checking for directory renames\n"))
145 repo.ui.debug(_(" checking for directory renames\n"))
146
146
147 # generate a directory move map
147 # generate a directory move map
148 d1, d2 = _dirs(m1), _dirs(m2)
148 d1, d2 = _dirs(m1), _dirs(m2)
149 invalid = {}
149 invalid = {}
150 dirmove = {}
150 dirmove = {}
151
151
152 # examine each file copy for a potential directory move, which is
152 # examine each file copy for a potential directory move, which is
153 # when all the files in a directory are moved to a new directory
153 # when all the files in a directory are moved to a new directory
154 for dst, src in fullcopy.items():
154 for dst, src in fullcopy.items():
155 dsrc, ddst = _dirname(src), _dirname(dst)
155 dsrc, ddst = _dirname(src), _dirname(dst)
156 if dsrc in invalid:
156 if dsrc in invalid:
157 # already seen to be uninteresting
157 # already seen to be uninteresting
158 continue
158 continue
159 elif dsrc in d1 and ddst in d1:
159 elif dsrc in d1 and ddst in d1:
160 # directory wasn't entirely moved locally
160 # directory wasn't entirely moved locally
161 invalid[dsrc] = True
161 invalid[dsrc] = True
162 elif dsrc in d2 and ddst in d2:
162 elif dsrc in d2 and ddst in d2:
163 # directory wasn't entirely moved remotely
163 # directory wasn't entirely moved remotely
164 invalid[dsrc] = True
164 invalid[dsrc] = True
165 elif dsrc in dirmove and dirmove[dsrc] != ddst:
165 elif dsrc in dirmove and dirmove[dsrc] != ddst:
166 # files from the same directory moved to two different places
166 # files from the same directory moved to two different places
167 invalid[dsrc] = True
167 invalid[dsrc] = True
168 else:
168 else:
169 # looks good so far
169 # looks good so far
170 dirmove[dsrc + "/"] = ddst + "/"
170 dirmove[dsrc + "/"] = ddst + "/"
171
171
172 for i in invalid:
172 for i in invalid:
173 if i in dirmove:
173 if i in dirmove:
174 del dirmove[i]
174 del dirmove[i]
175 del d1, d2, invalid
175 del d1, d2, invalid
176
176
177 if not dirmove:
177 if not dirmove:
178 return copy, diverge
178 return copy, diverge
179
179
180 for d in dirmove:
180 for d in dirmove:
181 repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))
181 repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))
182
182
183 # check unaccounted nonoverlapping files against directory moves
183 # check unaccounted nonoverlapping files against directory moves
184 for f in u1 + u2:
184 for f in u1 + u2:
185 if f not in fullcopy:
185 if f not in fullcopy:
186 for d in dirmove:
186 for d in dirmove:
187 if f.startswith(d):
187 if f.startswith(d):
188 # new file added in a directory that was moved, move it
188 # new file added in a directory that was moved, move it
189 copy[f] = dirmove[d] + f[len(d):]
189 copy[f] = dirmove[d] + f[len(d):]
190 repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
190 repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
191 break
191 break
192
192
193 return copy, diverge
193 return copy, diverge
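The directory-rename pass above is easiest to see on a toy input. This standalone sketch (illustrative only; the file names and the simplified bookkeeping are invented, and it ignores the d1/d2 "directory still exists" checks that copies() also applies) shows how a fullcopy map in which every file moved together collapses into a single dirmove entry.

def dirname(f):
    # same convention as _dirname() above: "" for files at the repository root
    s = f.rfind("/")
    return "" if s == -1 else f[:s]

fullcopy = {                       # dst -> src, as checkcopies() records it
    "newdir/a.txt": "olddir/a.txt",
    "newdir/b.txt": "olddir/b.txt",
}

dirmove, invalid = {}, set()
for dst, src in fullcopy.items():
    dsrc, ddst = dirname(src), dirname(dst)
    if dsrc in invalid:
        continue
    if dsrc in dirmove and dirmove[dsrc] != ddst:
        # files from the same directory moved to two different places
        invalid.add(dsrc)
        del dirmove[dsrc]
    else:
        dirmove[dsrc] = ddst

print(dirmove)                     # {'olddir': 'newdir'}
# copies() would now map an unaccounted new file such as "olddir/c.txt"
# to "newdir/c.txt", exactly as in the final loop above.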
@@ -1,399 +1,399 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev
8 from node import nullid, nullrev
9 from i18n import _
9 from i18n import _
10 import errno, util, os, filemerge, copies
10 import errno, util, os, filemerge, copies
11
11
12 def _checkunknown(wctx, mctx):
12 def _checkunknown(wctx, mctx):
13 "check for collisions between unknown files and files in mctx"
13 "check for collisions between unknown files and files in mctx"
14 for f in wctx.unknown():
14 for f in wctx.unknown():
15 if f in mctx and mctx[f].cmp(wctx[f].data()):
15 if f in mctx and mctx[f].cmp(wctx[f].data()):
16 raise util.Abort(_("untracked file in working directory differs"
16 raise util.Abort(_("untracked file in working directory differs"
17 " from file in requested revision: '%s'") % f)
17 " from file in requested revision: '%s'") % f)
18
18
19 def _checkcollision(mctx):
19 def _checkcollision(mctx):
20 "check for case folding collisions in the destination context"
20 "check for case folding collisions in the destination context"
21 folded = {}
21 folded = {}
22 for fn in mctx:
22 for fn in mctx:
23 fold = fn.lower()
23 fold = fn.lower()
24 if fold in folded:
24 if fold in folded:
25 raise util.Abort(_("case-folding collision between %s and %s")
25 raise util.Abort(_("case-folding collision between %s and %s")
26 % (fn, folded[fold]))
26 % (fn, folded[fold]))
27 folded[fold] = fn
27 folded[fold] = fn
28
28
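A minimal, self-contained restatement of the collision check (illustrative only; util.Abort is replaced by a plain ValueError so the snippet runs outside Mercurial):

def check_collision(filenames):
    "raise if two names differ only by case, as _checkcollision() does"
    folded = {}
    for fn in filenames:
        fold = fn.lower()
        if fold in folded:
            raise ValueError("case-folding collision between %s and %s"
                             % (fn, folded[fold]))
        folded[fold] = fn

check_collision(["README", "Makefile"])        # fine
try:
    check_collision(["README", "readme"])
except ValueError as e:
    print(e)       # case-folding collision between readme and README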
29 def _forgetremoved(wctx, mctx, branchmerge):
29 def _forgetremoved(wctx, mctx, branchmerge):
30 """
30 """
31 Forget removed files
31 Forget removed files
32
32
33 If we're jumping between revisions (as opposed to merging), and if
33 If we're jumping between revisions (as opposed to merging), and if
34 neither the working directory nor the target rev has the file,
34 neither the working directory nor the target rev has the file,
35 then we need to remove it from the dirstate, to prevent the
35 then we need to remove it from the dirstate, to prevent the
36 dirstate from listing the file when it is no longer in the
36 dirstate from listing the file when it is no longer in the
37 manifest.
37 manifest.
38
38
39 If we're merging, and the other revision has removed a file
39 If we're merging, and the other revision has removed a file
40 that is not present in the working directory, we need to mark it
40 that is not present in the working directory, we need to mark it
41 as removed.
41 as removed.
42 """
42 """
43
43
44 action = []
44 action = []
45 state = branchmerge and 'r' or 'f'
45 state = branchmerge and 'r' or 'f'
46 for f in wctx.deleted():
46 for f in wctx.deleted():
47 if f not in mctx:
47 if f not in mctx:
48 action.append((f, state))
48 action.append((f, state))
49
49
50 if not branchmerge:
50 if not branchmerge:
51 for f in wctx.removed():
51 for f in wctx.removed():
52 if f not in mctx:
52 if f not in mctx:
53 action.append((f, "f"))
53 action.append((f, "f"))
54
54
55 return action
55 return action
56
56
57 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
57 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
58 """
58 """
59 Merge p1 and p2 with ancestor ma and generate merge action list
59 Merge p1 and p2 with ancestor ma and generate merge action list
60
60
61 overwrite = whether we clobber working files
61 overwrite = whether we clobber working files
62 partial = function to filter file lists
62 partial = function to filter file lists
63 """
63 """
64
64
65 repo.ui.note(_("resolving manifests\n"))
65 repo.ui.note(_("resolving manifests\n"))
66 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
66 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
67 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
67 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
68
68
69 m1 = p1.manifest()
69 m1 = p1.manifest()
70 m2 = p2.manifest()
70 m2 = p2.manifest()
71 ma = pa.manifest()
71 ma = pa.manifest()
72 backwards = (pa == p2)
72 backwards = (pa == p2)
73 action = []
73 action = []
74 copy, copied, diverge = {}, {}, {}
74 copy, copied, diverge = {}, {}, {}
75
75
76 def fmerge(f, f2=None, fa=None):
76 def fmerge(f, f2=None, fa=None):
77 """merge flags"""
77 """merge flags"""
78 if not f2:
78 if not f2:
79 f2 = f
79 f2 = f
80 fa = f
80 fa = f
81 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
81 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
82 if m == n: # flags agree
82 if m == n: # flags agree
83 return m # unchanged
83 return m # unchanged
84 if m and n: # flags are set but don't agree
84 if m and n: # flags are set but don't agree
85 if not a: # both differ from parent
85 if not a: # both differ from parent
86 r = repo.ui.prompt(
86 r = repo.ui.prompt(
87 _(" conflicting flags for %s\n"
87 _(" conflicting flags for %s\n"
88 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
88 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
89 return r != "n" and r or ''
89 return r != "n" and r or ''
90 if m == a:
90 if m == a:
91 return n # changed from m to n
91 return n # changed from m to n
92 return m # changed from n to m
92 return m # changed from n to m
93 if m and m != a: # changed from a to m
93 if m and m != a: # changed from a to m
94 return m
94 return m
95 if n and n != a: # changed from a to n
95 if n and n != a: # changed from a to n
96 return n
96 return n
97 return '' # flag was cleared
97 return '' # flag was cleared
98
98
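The flag-merging rules in fmerge() are compact; this standalone restatement (illustrative only, with the interactive prompt replaced by a sentinel string) spells them out in terms of the ancestor/local/remote flags (a, m, n):

def fmerge_rule(a, m, n):
    if m == n:                       # flags agree: unchanged
        return m
    if m and n:                      # set on both sides but different
        if not a:
            return "prompt user"     # both differ from the ancestor
        return n if m == a else m    # keep the side that actually changed
    if m and m != a:                 # changed from a to m
        return m
    if n and n != a:                 # changed from a to n
        return n
    return ""                        # flag was cleared

print(repr(fmerge_rule("", "x", "x")))   # 'x'  -- both sides made it executable
print(repr(fmerge_rule("x", "x", "")))   # ''   -- remote cleared the exec bit
print(repr(fmerge_rule("", "x", "l")))   # 'prompt user'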
99 def act(msg, m, f, *args):
99 def act(msg, m, f, *args):
100 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
100 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
101 action.append((f, m) + args)
101 action.append((f, m) + args)
102
102
103 if not (backwards or overwrite):
103 if pa and not (backwards or overwrite):
104 copy, diverge = copies.copies(repo, p1, p2, pa)
104 copy, diverge = copies.copies(repo, p1, p2, pa)
105 copied = dict.fromkeys(copy.values())
105 copied = dict.fromkeys(copy.values())
106 for of, fl in diverge.items():
106 for of, fl in diverge.items():
107 act("divergent renames", "dr", of, fl)
107 act("divergent renames", "dr", of, fl)
108
108
109 # Compare manifests
109 # Compare manifests
110 for f, n in m1.iteritems():
110 for f, n in m1.iteritems():
111 if partial and not partial(f):
111 if partial and not partial(f):
112 continue
112 continue
113 if f in m2:
113 if f in m2:
114 if overwrite or backwards:
114 if overwrite or backwards:
115 rflags = m2.flags(f)
115 rflags = m2.flags(f)
116 else:
116 else:
117 rflags = fmerge(f)
117 rflags = fmerge(f)
118 # are files different?
118 # are files different?
119 if n != m2[f]:
119 if n != m2[f]:
120 a = ma.get(f, nullid)
120 a = ma.get(f, nullid)
121 # are we clobbering?
121 # are we clobbering?
122 if overwrite:
122 if overwrite:
123 act("clobbering", "g", f, rflags)
123 act("clobbering", "g", f, rflags)
124 # or are we going back in time and clean?
124 # or are we going back in time and clean?
125 elif backwards and not n[20:]:
125 elif backwards and not n[20:]:
126 act("reverting", "g", f, rflags)
126 act("reverting", "g", f, rflags)
127 # are both different from the ancestor?
127 # are both different from the ancestor?
128 elif n != a and m2[f] != a:
128 elif n != a and m2[f] != a:
129 act("versions differ", "m", f, f, f, rflags, False)
129 act("versions differ", "m", f, f, f, rflags, False)
130 # is remote's version newer?
130 # is remote's version newer?
131 elif m2[f] != a:
131 elif m2[f] != a:
132 act("remote is newer", "g", f, rflags)
132 act("remote is newer", "g", f, rflags)
133 # local is newer, not overwrite, check mode bits
133 # local is newer, not overwrite, check mode bits
134 elif m1.flags(f) != rflags:
134 elif m1.flags(f) != rflags:
135 act("update permissions", "e", f, rflags)
135 act("update permissions", "e", f, rflags)
136 # contents same, check mode bits
136 # contents same, check mode bits
137 elif m1.flags(f) != rflags:
137 elif m1.flags(f) != rflags:
138 act("update permissions", "e", f, rflags)
138 act("update permissions", "e", f, rflags)
139 elif f in copied:
139 elif f in copied:
140 continue
140 continue
141 elif f in copy:
141 elif f in copy:
142 f2 = copy[f]
142 f2 = copy[f]
143 if f2 not in m2: # directory rename
143 if f2 not in m2: # directory rename
144 act("remote renamed directory to " + f2, "d",
144 act("remote renamed directory to " + f2, "d",
145 f, None, f2, m1.flags(f))
145 f, None, f2, m1.flags(f))
146 elif f2 in m1: # case 2 A,B/B/B
146 elif f2 in m1: # case 2 A,B/B/B
147 act("local copied to " + f2, "m",
147 act("local copied to " + f2, "m",
148 f, f2, f, fmerge(f, f2, f2), False)
148 f, f2, f, fmerge(f, f2, f2), False)
149 else: # case 4,21 A/B/B
149 else: # case 4,21 A/B/B
150 act("local moved to " + f2, "m",
150 act("local moved to " + f2, "m",
151 f, f2, f, fmerge(f, f2, f2), False)
151 f, f2, f, fmerge(f, f2, f2), False)
152 elif f in ma:
152 elif f in ma:
153 if n != ma[f] and not overwrite:
153 if n != ma[f] and not overwrite:
154 if repo.ui.prompt(
154 if repo.ui.prompt(
155 _(" local changed %s which remote deleted\n"
155 _(" local changed %s which remote deleted\n"
156 "use (c)hanged version or (d)elete?") % f,
156 "use (c)hanged version or (d)elete?") % f,
157 _("[cd]"), _("c")) == _("d"):
157 _("[cd]"), _("c")) == _("d"):
158 act("prompt delete", "r", f)
158 act("prompt delete", "r", f)
159 else:
159 else:
160 act("other deleted", "r", f)
160 act("other deleted", "r", f)
161 else:
161 else:
162 # file is created on branch or in working directory
162 # file is created on branch or in working directory
163 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
163 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
164 act("remote deleted", "r", f)
164 act("remote deleted", "r", f)
165
165
166 for f, n in m2.iteritems():
166 for f, n in m2.iteritems():
167 if partial and not partial(f):
167 if partial and not partial(f):
168 continue
168 continue
169 if f in m1:
169 if f in m1:
170 continue
170 continue
171 if f in copied:
171 if f in copied:
172 continue
172 continue
173 if f in copy:
173 if f in copy:
174 f2 = copy[f]
174 f2 = copy[f]
175 if f2 not in m1: # directory rename
175 if f2 not in m1: # directory rename
176 act("local renamed directory to " + f2, "d",
176 act("local renamed directory to " + f2, "d",
177 None, f, f2, m2.flags(f))
177 None, f, f2, m2.flags(f))
178 elif f2 in m2: # rename case 1, A/A,B/A
178 elif f2 in m2: # rename case 1, A/A,B/A
179 act("remote copied to " + f, "m",
179 act("remote copied to " + f, "m",
180 f2, f, f, fmerge(f2, f, f2), False)
180 f2, f, f, fmerge(f2, f, f2), False)
181 else: # case 3,20 A/B/A
181 else: # case 3,20 A/B/A
182 act("remote moved to " + f, "m",
182 act("remote moved to " + f, "m",
183 f2, f, f, fmerge(f2, f, f2), True)
183 f2, f, f, fmerge(f2, f, f2), True)
184 elif f in ma:
184 elif f in ma:
185 if overwrite or backwards:
185 if overwrite or backwards:
186 act("recreating", "g", f, m2.flags(f))
186 act("recreating", "g", f, m2.flags(f))
187 elif n != ma[f]:
187 elif n != ma[f]:
188 if repo.ui.prompt(
188 if repo.ui.prompt(
189 _("remote changed %s which local deleted\n"
189 _("remote changed %s which local deleted\n"
190 "use (c)hanged version or leave (d)eleted?") % f,
190 "use (c)hanged version or leave (d)eleted?") % f,
191 _("[cd]"), _("c")) == _("c"):
191 _("[cd]"), _("c")) == _("c"):
192 act("prompt recreating", "g", f, m2.flags(f))
192 act("prompt recreating", "g", f, m2.flags(f))
193 else:
193 else:
194 act("remote created", "g", f, m2.flags(f))
194 act("remote created", "g", f, m2.flags(f))
195
195
196 return action
196 return action
197
197
198 def applyupdates(repo, action, wctx, mctx):
198 def applyupdates(repo, action, wctx, mctx):
199 "apply the merge action list to the working directory"
199 "apply the merge action list to the working directory"
200
200
201 updated, merged, removed, unresolved = 0, 0, 0, 0
201 updated, merged, removed, unresolved = 0, 0, 0, 0
202 action.sort()
202 action.sort()
203 # prescan for copy/renames
203 # prescan for copy/renames
204 for a in action:
204 for a in action:
205 f, m = a[:2]
205 f, m = a[:2]
206 if m == 'm': # merge
206 if m == 'm': # merge
207 f2, fd, flags, move = a[2:]
207 f2, fd, flags, move = a[2:]
208 if f != fd:
208 if f != fd:
209 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
209 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
210 repo.wwrite(fd, repo.wread(f), flags)
210 repo.wwrite(fd, repo.wread(f), flags)
211
211
212 audit_path = util.path_auditor(repo.root)
212 audit_path = util.path_auditor(repo.root)
213
213
214 for a in action:
214 for a in action:
215 f, m = a[:2]
215 f, m = a[:2]
216 if f and f[0] == "/":
216 if f and f[0] == "/":
217 continue
217 continue
218 if m == "r": # remove
218 if m == "r": # remove
219 repo.ui.note(_("removing %s\n") % f)
219 repo.ui.note(_("removing %s\n") % f)
220 audit_path(f)
220 audit_path(f)
221 try:
221 try:
222 util.unlink(repo.wjoin(f))
222 util.unlink(repo.wjoin(f))
223 except OSError, inst:
223 except OSError, inst:
224 if inst.errno != errno.ENOENT:
224 if inst.errno != errno.ENOENT:
225 repo.ui.warn(_("update failed to remove %s: %s!\n") %
225 repo.ui.warn(_("update failed to remove %s: %s!\n") %
226 (f, inst.strerror))
226 (f, inst.strerror))
227 removed += 1
227 removed += 1
228 elif m == "m": # merge
228 elif m == "m": # merge
229 f2, fd, flags, move = a[2:]
229 f2, fd, flags, move = a[2:]
230 r = filemerge.filemerge(repo, f, fd, f2, wctx, mctx)
230 r = filemerge.filemerge(repo, f, fd, f2, wctx, mctx)
231 if r > 0:
231 if r > 0:
232 unresolved += 1
232 unresolved += 1
233 else:
233 else:
234 if r is None:
234 if r is None:
235 updated += 1
235 updated += 1
236 else:
236 else:
237 merged += 1
237 merged += 1
238 util.set_flags(repo.wjoin(fd), flags)
238 util.set_flags(repo.wjoin(fd), flags)
239 if f != fd and move and util.lexists(repo.wjoin(f)):
239 if f != fd and move and util.lexists(repo.wjoin(f)):
240 repo.ui.debug(_("removing %s\n") % f)
240 repo.ui.debug(_("removing %s\n") % f)
241 os.unlink(repo.wjoin(f))
241 os.unlink(repo.wjoin(f))
242 elif m == "g": # get
242 elif m == "g": # get
243 flags = a[2]
243 flags = a[2]
244 repo.ui.note(_("getting %s\n") % f)
244 repo.ui.note(_("getting %s\n") % f)
245 t = mctx.filectx(f).data()
245 t = mctx.filectx(f).data()
246 repo.wwrite(f, t, flags)
246 repo.wwrite(f, t, flags)
247 updated += 1
247 updated += 1
248 elif m == "d": # directory rename
248 elif m == "d": # directory rename
249 f2, fd, flags = a[2:]
249 f2, fd, flags = a[2:]
250 if f:
250 if f:
251 repo.ui.note(_("moving %s to %s\n") % (f, fd))
251 repo.ui.note(_("moving %s to %s\n") % (f, fd))
252 t = wctx.filectx(f).data()
252 t = wctx.filectx(f).data()
253 repo.wwrite(fd, t, flags)
253 repo.wwrite(fd, t, flags)
254 util.unlink(repo.wjoin(f))
254 util.unlink(repo.wjoin(f))
255 if f2:
255 if f2:
256 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
256 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
257 t = mctx.filectx(f2).data()
257 t = mctx.filectx(f2).data()
258 repo.wwrite(fd, t, flags)
258 repo.wwrite(fd, t, flags)
259 updated += 1
259 updated += 1
260 elif m == "dr": # divergent renames
260 elif m == "dr": # divergent renames
261 fl = a[2]
261 fl = a[2]
262 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
262 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
263 for nf in fl:
263 for nf in fl:
264 repo.ui.warn(" %s\n" % nf)
264 repo.ui.warn(" %s\n" % nf)
265 elif m == "e": # exec
265 elif m == "e": # exec
266 flags = a[2]
266 flags = a[2]
267 util.set_flags(repo.wjoin(f), flags)
267 util.set_flags(repo.wjoin(f), flags)
268
268
269 return updated, merged, removed, unresolved
269 return updated, merged, removed, unresolved
270
270
271 def recordupdates(repo, action, branchmerge):
271 def recordupdates(repo, action, branchmerge):
272 "record merge actions to the dirstate"
272 "record merge actions to the dirstate"
273
273
274 for a in action:
274 for a in action:
275 f, m = a[:2]
275 f, m = a[:2]
276 if m == "r": # remove
276 if m == "r": # remove
277 if branchmerge:
277 if branchmerge:
278 repo.dirstate.remove(f)
278 repo.dirstate.remove(f)
279 else:
279 else:
280 repo.dirstate.forget(f)
280 repo.dirstate.forget(f)
281 elif m == "f": # forget
281 elif m == "f": # forget
282 repo.dirstate.forget(f)
282 repo.dirstate.forget(f)
283 elif m in "ge": # get or exec change
283 elif m in "ge": # get or exec change
284 if branchmerge:
284 if branchmerge:
285 repo.dirstate.normaldirty(f)
285 repo.dirstate.normaldirty(f)
286 else:
286 else:
287 repo.dirstate.normal(f)
287 repo.dirstate.normal(f)
288 elif m == "m": # merge
288 elif m == "m": # merge
289 f2, fd, flag, move = a[2:]
289 f2, fd, flag, move = a[2:]
290 if branchmerge:
290 if branchmerge:
291 # We've done a branch merge, mark this file as merged
291 # We've done a branch merge, mark this file as merged
292 # so that we properly record the merger later
292 # so that we properly record the merger later
293 repo.dirstate.merge(fd)
293 repo.dirstate.merge(fd)
294 if f != f2: # copy/rename
294 if f != f2: # copy/rename
295 if move:
295 if move:
296 repo.dirstate.remove(f)
296 repo.dirstate.remove(f)
297 if f != fd:
297 if f != fd:
298 repo.dirstate.copy(f, fd)
298 repo.dirstate.copy(f, fd)
299 else:
299 else:
300 repo.dirstate.copy(f2, fd)
300 repo.dirstate.copy(f2, fd)
301 else:
301 else:
302 # We've update-merged a locally modified file, so
302 # We've update-merged a locally modified file, so
303 # we set the dirstate to emulate a normal checkout
303 # we set the dirstate to emulate a normal checkout
304 # of that file some time in the past. Thus our
304 # of that file some time in the past. Thus our
305 # merge will appear as a normal local file
305 # merge will appear as a normal local file
306 # modification.
306 # modification.
307 repo.dirstate.normallookup(fd)
307 repo.dirstate.normallookup(fd)
308 if move:
308 if move:
309 repo.dirstate.forget(f)
309 repo.dirstate.forget(f)
310 elif m == "d": # directory rename
310 elif m == "d": # directory rename
311 f2, fd, flag = a[2:]
311 f2, fd, flag = a[2:]
312 if not f2 and f not in repo.dirstate:
312 if not f2 and f not in repo.dirstate:
313 # untracked file moved
313 # untracked file moved
314 continue
314 continue
315 if branchmerge:
315 if branchmerge:
316 repo.dirstate.add(fd)
316 repo.dirstate.add(fd)
317 if f:
317 if f:
318 repo.dirstate.remove(f)
318 repo.dirstate.remove(f)
319 repo.dirstate.copy(f, fd)
319 repo.dirstate.copy(f, fd)
320 if f2:
320 if f2:
321 repo.dirstate.copy(f2, fd)
321 repo.dirstate.copy(f2, fd)
322 else:
322 else:
323 repo.dirstate.normal(fd)
323 repo.dirstate.normal(fd)
324 if f:
324 if f:
325 repo.dirstate.forget(f)
325 repo.dirstate.forget(f)
326
326
327 def update(repo, node, branchmerge, force, partial):
327 def update(repo, node, branchmerge, force, partial):
328 """
328 """
329 Perform a merge between the working directory and the given node
329 Perform a merge between the working directory and the given node
330
330
331 branchmerge = whether to merge between branches
331 branchmerge = whether to merge between branches
332 force = whether to force branch merging or file overwriting
332 force = whether to force branch merging or file overwriting
333 partial = a function to filter file lists (dirstate not updated)
333 partial = a function to filter file lists (dirstate not updated)
334 """
334 """
335
335
336 wlock = repo.wlock()
336 wlock = repo.wlock()
337 try:
337 try:
338 wc = repo.workingctx()
338 wc = repo.workingctx()
339 if node is None:
339 if node is None:
340 # tip of current branch
340 # tip of current branch
341 try:
341 try:
342 node = repo.branchtags()[wc.branch()]
342 node = repo.branchtags()[wc.branch()]
343 except KeyError:
343 except KeyError:
344 if wc.branch() == "default": # no default branch!
344 if wc.branch() == "default": # no default branch!
345 node = repo.lookup("tip") # update to tip
345 node = repo.lookup("tip") # update to tip
346 else:
346 else:
347 raise util.Abort(_("branch %s not found") % wc.branch())
347 raise util.Abort(_("branch %s not found") % wc.branch())
348 overwrite = force and not branchmerge
348 overwrite = force and not branchmerge
349 forcemerge = force and branchmerge
349 forcemerge = force and branchmerge
350 pl = wc.parents()
350 pl = wc.parents()
351 p1, p2 = pl[0], repo.changectx(node)
351 p1, p2 = pl[0], repo.changectx(node)
352 pa = p1.ancestor(p2)
352 pa = p1.ancestor(p2)
353 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
353 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
354 fastforward = False
354 fastforward = False
355
355
356 ### check phase
356 ### check phase
357 if not overwrite and len(pl) > 1:
357 if not overwrite and len(pl) > 1:
358 raise util.Abort(_("outstanding uncommitted merges"))
358 raise util.Abort(_("outstanding uncommitted merges"))
359 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
359 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
360 if branchmerge:
360 if branchmerge:
361 if p1.branch() != p2.branch() and pa != p2:
361 if p1.branch() != p2.branch() and pa != p2:
362 fastforward = True
362 fastforward = True
363 else:
363 else:
364 raise util.Abort(_("there is nothing to merge, just use "
364 raise util.Abort(_("there is nothing to merge, just use "
365 "'hg update' or look at 'hg heads'"))
365 "'hg update' or look at 'hg heads'"))
366 elif not (overwrite or branchmerge):
366 elif not (overwrite or branchmerge):
367 raise util.Abort(_("update spans branches, use 'hg merge' "
367 raise util.Abort(_("update spans branches, use 'hg merge' "
368 "or 'hg update -C' to lose changes"))
368 "or 'hg update -C' to lose changes"))
369 if branchmerge and not forcemerge:
369 if branchmerge and not forcemerge:
370 if wc.files() or wc.deleted():
370 if wc.files() or wc.deleted():
371 raise util.Abort(_("outstanding uncommitted changes"))
371 raise util.Abort(_("outstanding uncommitted changes"))
372
372
373 ### calculate phase
373 ### calculate phase
374 action = []
374 action = []
375 if not force:
375 if not force:
376 _checkunknown(wc, p2)
376 _checkunknown(wc, p2)
377 if not util.checkfolding(repo.path):
377 if not util.checkfolding(repo.path):
378 _checkcollision(p2)
378 _checkcollision(p2)
379 action += _forgetremoved(wc, p2, branchmerge)
379 action += _forgetremoved(wc, p2, branchmerge)
380 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
380 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
381
381
382 ### apply phase
382 ### apply phase
383 if not branchmerge: # just jump to the new rev
383 if not branchmerge: # just jump to the new rev
384 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
384 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
385 if not partial:
385 if not partial:
386 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
386 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
387
387
388 stats = applyupdates(repo, action, wc, p2)
388 stats = applyupdates(repo, action, wc, p2)
389
389
390 if not partial:
390 if not partial:
391 recordupdates(repo, action, branchmerge)
391 recordupdates(repo, action, branchmerge)
392 repo.dirstate.setparents(fp1, fp2)
392 repo.dirstate.setparents(fp1, fp2)
393 if not branchmerge and not fastforward:
393 if not branchmerge and not fastforward:
394 repo.dirstate.setbranch(p2.branch())
394 repo.dirstate.setbranch(p2.branch())
395 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
395 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
396
396
397 return stats
397 return stats
398 finally:
398 finally:
399 del wlock
399 del wlock
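The action lists that tie manifestmerge(), applyupdates() and recordupdates() together are plain tuples whose tail depends on the action code. A small sketch (illustrative only; the file names are invented, but the tuple shapes follow the act() calls and the a[2:] unpacking above):

actions = [
    ("a.txt",   "g", ""),                            # get file from the other side
    ("b.txt",   "m", "b.txt", "b.txt", "", False),   # merge: f, f2, fd, flags, move
    ("old.txt", "r"),                                # remove
    ("c.txt",   "e", "x"),                           # update flags (here: executable)
    ("t.txt",   "dr", ["t1.txt", "t2.txt"]),         # warn about divergent renames
]

for a in actions:
    f, m = a[:2]
    if m == "m":
        f2, fd, flags, move = a[2:]
        print("merge %s with %s into %s" % (f, f2, fd))
    elif m == "g":
        print("get %s (flags %r)" % (f, a[2]))
    elif m == "r":
        print("remove %s" % f)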
@@ -1,1393 +1,1345 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers, copies
12 import cStringIO, email.Parser, os, popen2, re, sha, errno
12 import cStringIO, email.Parser, os, popen2, re, sha, errno
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 class PatchError(Exception):
15 class PatchError(Exception):
16 pass
16 pass
17
17
18 class NoHunks(PatchError):
18 class NoHunks(PatchError):
19 pass
19 pass
20
20
21 # helper functions
21 # helper functions
22
22
23 def copyfile(src, dst, basedir=None):
23 def copyfile(src, dst, basedir=None):
24 if not basedir:
24 if not basedir:
25 basedir = os.getcwd()
25 basedir = os.getcwd()
26
26
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 if os.path.exists(absdst):
28 if os.path.exists(absdst):
29 raise util.Abort(_("cannot create %s: destination already exists") %
29 raise util.Abort(_("cannot create %s: destination already exists") %
30 dst)
30 dst)
31
31
32 targetdir = os.path.dirname(absdst)
32 targetdir = os.path.dirname(absdst)
33 if not os.path.isdir(targetdir):
33 if not os.path.isdir(targetdir):
34 os.makedirs(targetdir)
34 os.makedirs(targetdir)
35
35
36 util.copyfile(abssrc, absdst)
36 util.copyfile(abssrc, absdst)
37
37
38 # public functions
38 # public functions
39
39
40 def extract(ui, fileobj):
40 def extract(ui, fileobj):
41 '''extract patch from data read from fileobj.
41 '''extract patch from data read from fileobj.
42
42
43 patch can be a normal patch or contained in an email message.
43 patch can be a normal patch or contained in an email message.
44
44
45 return tuple (filename, message, user, date, node, p1, p2).
45 return tuple (filename, message, user, date, node, p1, p2).
46 Any item in the returned tuple can be None. If filename is None,
46 Any item in the returned tuple can be None. If filename is None,
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48
48
49 # attempt to detect the start of a patch
49 # attempt to detect the start of a patch
50 # (this heuristic is borrowed from quilt)
50 # (this heuristic is borrowed from quilt)
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54
54
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 tmpfp = os.fdopen(fd, 'w')
56 tmpfp = os.fdopen(fd, 'w')
57 try:
57 try:
58 msg = email.Parser.Parser().parse(fileobj)
58 msg = email.Parser.Parser().parse(fileobj)
59
59
60 subject = msg['Subject']
60 subject = msg['Subject']
61 user = msg['From']
61 user = msg['From']
62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
63 # should try to parse msg['Date']
63 # should try to parse msg['Date']
64 date = None
64 date = None
65 nodeid = None
65 nodeid = None
66 branch = None
66 branch = None
67 parents = []
67 parents = []
68
68
69 if subject:
69 if subject:
70 if subject.startswith('[PATCH'):
70 if subject.startswith('[PATCH'):
71 pend = subject.find(']')
71 pend = subject.find(']')
72 if pend >= 0:
72 if pend >= 0:
73 subject = subject[pend+1:].lstrip()
73 subject = subject[pend+1:].lstrip()
74 subject = subject.replace('\n\t', ' ')
74 subject = subject.replace('\n\t', ' ')
75 ui.debug('Subject: %s\n' % subject)
75 ui.debug('Subject: %s\n' % subject)
76 if user:
76 if user:
77 ui.debug('From: %s\n' % user)
77 ui.debug('From: %s\n' % user)
78 diffs_seen = 0
78 diffs_seen = 0
79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
80 message = ''
80 message = ''
81 for part in msg.walk():
81 for part in msg.walk():
82 content_type = part.get_content_type()
82 content_type = part.get_content_type()
83 ui.debug('Content-Type: %s\n' % content_type)
83 ui.debug('Content-Type: %s\n' % content_type)
84 if content_type not in ok_types:
84 if content_type not in ok_types:
85 continue
85 continue
86 payload = part.get_payload(decode=True)
86 payload = part.get_payload(decode=True)
87 m = diffre.search(payload)
87 m = diffre.search(payload)
88 if m:
88 if m:
89 hgpatch = False
89 hgpatch = False
90 ignoretext = False
90 ignoretext = False
91
91
92 ui.debug(_('found patch at byte %d\n') % m.start(0))
92 ui.debug(_('found patch at byte %d\n') % m.start(0))
93 diffs_seen += 1
93 diffs_seen += 1
94 cfp = cStringIO.StringIO()
94 cfp = cStringIO.StringIO()
95 for line in payload[:m.start(0)].splitlines():
95 for line in payload[:m.start(0)].splitlines():
96 if line.startswith('# HG changeset patch'):
96 if line.startswith('# HG changeset patch'):
97 ui.debug(_('patch generated by hg export\n'))
97 ui.debug(_('patch generated by hg export\n'))
98 hgpatch = True
98 hgpatch = True
99 # drop earlier commit message content
99 # drop earlier commit message content
100 cfp.seek(0)
100 cfp.seek(0)
101 cfp.truncate()
101 cfp.truncate()
102 subject = None
102 subject = None
103 elif hgpatch:
103 elif hgpatch:
104 if line.startswith('# User '):
104 if line.startswith('# User '):
105 user = line[7:]
105 user = line[7:]
106 ui.debug('From: %s\n' % user)
106 ui.debug('From: %s\n' % user)
107 elif line.startswith("# Date "):
107 elif line.startswith("# Date "):
108 date = line[7:]
108 date = line[7:]
109 elif line.startswith("# Branch "):
109 elif line.startswith("# Branch "):
110 branch = line[9:]
110 branch = line[9:]
111 elif line.startswith("# Node ID "):
111 elif line.startswith("# Node ID "):
112 nodeid = line[10:]
112 nodeid = line[10:]
113 elif line.startswith("# Parent "):
113 elif line.startswith("# Parent "):
114 parents.append(line[10:])
114 parents.append(line[10:])
115 elif line == '---' and gitsendmail:
115 elif line == '---' and gitsendmail:
116 ignoretext = True
116 ignoretext = True
117 if not line.startswith('# ') and not ignoretext:
117 if not line.startswith('# ') and not ignoretext:
118 cfp.write(line)
118 cfp.write(line)
119 cfp.write('\n')
119 cfp.write('\n')
120 message = cfp.getvalue()
120 message = cfp.getvalue()
121 if tmpfp:
121 if tmpfp:
122 tmpfp.write(payload)
122 tmpfp.write(payload)
123 if not payload.endswith('\n'):
123 if not payload.endswith('\n'):
124 tmpfp.write('\n')
124 tmpfp.write('\n')
125 elif not diffs_seen and message and content_type == 'text/plain':
125 elif not diffs_seen and message and content_type == 'text/plain':
126 message += '\n' + payload
126 message += '\n' + payload
127 except:
127 except:
128 tmpfp.close()
128 tmpfp.close()
129 os.unlink(tmpname)
129 os.unlink(tmpname)
130 raise
130 raise
131
131
132 if subject and not message.startswith(subject):
132 if subject and not message.startswith(subject):
133 message = '%s\n%s' % (subject, message)
133 message = '%s\n%s' % (subject, message)
134 tmpfp.close()
134 tmpfp.close()
135 if not diffs_seen:
135 if not diffs_seen:
136 os.unlink(tmpname)
136 os.unlink(tmpname)
137 return None, message, user, date, branch, None, None, None
137 return None, message, user, date, branch, None, None, None
138 p1 = parents and parents.pop(0) or None
138 p1 = parents and parents.pop(0) or None
139 p2 = parents and parents.pop(0) or None
139 p2 = parents and parents.pop(0) or None
140 return tmpname, message, user, date, branch, nodeid, p1, p2
140 return tmpname, message, user, date, branch, nodeid, p1, p2
141
141
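The quilt-derived heuristic above is just a multi-line regular expression; this standalone snippet (illustrative only, with a made-up email body) shows it locating the start of the patch inside a message:

import re

diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                    r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                    r'(---|\*\*\*)[ \t])', re.MULTILINE)

body = ("Here is the fix we discussed.\n"
        "\n"
        "diff -r 000000000000 a.txt\n"
        "--- a/a.txt\n"
        "+++ b/a.txt\n")

m = diffre.search(body)
print(body[:m.start()].rstrip())          # the commit message part
print(body[m.start():].splitlines()[0])   # 'diff -r 000000000000 a.txt'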
142 GP_PATCH = 1 << 0 # we have to run patch
142 GP_PATCH = 1 << 0 # we have to run patch
143 GP_FILTER = 1 << 1 # there's some copy/rename operation
143 GP_FILTER = 1 << 1 # there's some copy/rename operation
144 GP_BINARY = 1 << 2 # there's a binary patch
144 GP_BINARY = 1 << 2 # there's a binary patch
145
145
146 def readgitpatch(fp, firstline=None):
146 def readgitpatch(fp, firstline=None):
147 """extract git-style metadata about patches from <patchname>"""
147 """extract git-style metadata about patches from <patchname>"""
148 class gitpatch:
148 class gitpatch:
149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
150 def __init__(self, path):
150 def __init__(self, path):
151 self.path = path
151 self.path = path
152 self.oldpath = None
152 self.oldpath = None
153 self.mode = None
153 self.mode = None
154 self.op = 'MODIFY'
154 self.op = 'MODIFY'
155 self.lineno = 0
155 self.lineno = 0
156 self.binary = False
156 self.binary = False
157
157
158 def reader(fp, firstline):
158 def reader(fp, firstline):
159 if firstline is not None:
159 if firstline is not None:
160 yield firstline
160 yield firstline
161 for line in fp:
161 for line in fp:
162 yield line
162 yield line
163
163
164 # Filter patch for git information
164 # Filter patch for git information
165 gitre = re.compile('diff --git a/(.*) b/(.*)')
165 gitre = re.compile('diff --git a/(.*) b/(.*)')
166 gp = None
166 gp = None
167 gitpatches = []
167 gitpatches = []
168 # Can have a git patch with only metadata, causing patch to complain
168 # Can have a git patch with only metadata, causing patch to complain
169 dopatch = 0
169 dopatch = 0
170
170
171 lineno = 0
171 lineno = 0
172 for line in reader(fp, firstline):
172 for line in reader(fp, firstline):
173 lineno += 1
173 lineno += 1
174 if line.startswith('diff --git'):
174 if line.startswith('diff --git'):
175 m = gitre.match(line)
175 m = gitre.match(line)
176 if m:
176 if m:
177 if gp:
177 if gp:
178 gitpatches.append(gp)
178 gitpatches.append(gp)
179 src, dst = m.group(1, 2)
179 src, dst = m.group(1, 2)
180 gp = gitpatch(dst)
180 gp = gitpatch(dst)
181 gp.lineno = lineno
181 gp.lineno = lineno
182 elif gp:
182 elif gp:
183 if line.startswith('--- '):
183 if line.startswith('--- '):
184 if gp.op in ('COPY', 'RENAME'):
184 if gp.op in ('COPY', 'RENAME'):
185 dopatch |= GP_FILTER
185 dopatch |= GP_FILTER
186 gitpatches.append(gp)
186 gitpatches.append(gp)
187 gp = None
187 gp = None
188 dopatch |= GP_PATCH
188 dopatch |= GP_PATCH
189 continue
189 continue
190 if line.startswith('rename from '):
190 if line.startswith('rename from '):
191 gp.op = 'RENAME'
191 gp.op = 'RENAME'
192 gp.oldpath = line[12:].rstrip()
192 gp.oldpath = line[12:].rstrip()
193 elif line.startswith('rename to '):
193 elif line.startswith('rename to '):
194 gp.path = line[10:].rstrip()
194 gp.path = line[10:].rstrip()
195 elif line.startswith('copy from '):
195 elif line.startswith('copy from '):
196 gp.op = 'COPY'
196 gp.op = 'COPY'
197 gp.oldpath = line[10:].rstrip()
197 gp.oldpath = line[10:].rstrip()
198 elif line.startswith('copy to '):
198 elif line.startswith('copy to '):
199 gp.path = line[8:].rstrip()
199 gp.path = line[8:].rstrip()
200 elif line.startswith('deleted file'):
200 elif line.startswith('deleted file'):
201 gp.op = 'DELETE'
201 gp.op = 'DELETE'
202 elif line.startswith('new file mode '):
202 elif line.startswith('new file mode '):
203 gp.op = 'ADD'
203 gp.op = 'ADD'
204 gp.mode = int(line.rstrip()[-6:], 8)
204 gp.mode = int(line.rstrip()[-6:], 8)
205 elif line.startswith('new mode '):
205 elif line.startswith('new mode '):
206 gp.mode = int(line.rstrip()[-6:], 8)
206 gp.mode = int(line.rstrip()[-6:], 8)
207 elif line.startswith('GIT binary patch'):
207 elif line.startswith('GIT binary patch'):
208 dopatch |= GP_BINARY
208 dopatch |= GP_BINARY
209 gp.binary = True
209 gp.binary = True
210 if gp:
210 if gp:
211 gitpatches.append(gp)
211 gitpatches.append(gp)
212
212
213 if not gitpatches:
213 if not gitpatches:
214 dopatch = GP_PATCH
214 dopatch = GP_PATCH
215
215
216 return (dopatch, gitpatches)
216 return (dopatch, gitpatches)
217
217
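readgitpatch() only inspects the git extended headers, not the hunks themselves. A standalone sketch (illustrative only; the sample diff is invented and only the rename headers are handled) of the kind of metadata it derives:

import re

sample = """\
diff --git a/olddir/a.txt b/newdir/a.txt
rename from olddir/a.txt
rename to newdir/a.txt
--- a/olddir/a.txt
+++ b/newdir/a.txt
"""

gitre = re.compile('diff --git a/(.*) b/(.*)')   # same pattern as above
meta = {}
for line in sample.splitlines():
    m = gitre.match(line)
    if m:
        meta = {"path": m.group(2), "oldpath": None, "op": "MODIFY"}
    elif line.startswith("rename from "):
        meta["op"] = "RENAME"
        meta["oldpath"] = line[len("rename from "):]
    elif line.startswith("rename to "):
        meta["path"] = line[len("rename to "):]

print(meta)   # {'path': 'newdir/a.txt', 'oldpath': 'olddir/a.txt', 'op': 'RENAME'}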
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 """apply <patchname> to the working directory.
219 """apply <patchname> to the working directory.
220 returns whether patch was applied with fuzz factor."""
220 returns whether patch was applied with fuzz factor."""
221 patcher = ui.config('ui', 'patch')
221 patcher = ui.config('ui', 'patch')
222 args = []
222 args = []
223 try:
223 try:
224 if patcher:
224 if patcher:
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 files)
226 files)
227 else:
227 else:
228 try:
228 try:
229 return internalpatch(patchname, ui, strip, cwd, files)
229 return internalpatch(patchname, ui, strip, cwd, files)
230 except NoHunks:
230 except NoHunks:
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 ui.debug('no valid hunks found; trying with %r instead\n' %
232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 patcher)
233 patcher)
234 if util.needbinarypatch():
234 if util.needbinarypatch():
235 args.append('--binary')
235 args.append('--binary')
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 files)
237 files)
238 except PatchError, err:
238 except PatchError, err:
239 s = str(err)
239 s = str(err)
240 if s:
240 if s:
241 raise util.Abort(s)
241 raise util.Abort(s)
242 else:
242 else:
243 raise util.Abort(_('patch failed to apply'))
243 raise util.Abort(_('patch failed to apply'))
244
244
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
246 """use <patcher> to apply <patchname> to the working directory.
246 """use <patcher> to apply <patchname> to the working directory.
247 returns whether patch was applied with fuzz factor."""
247 returns whether patch was applied with fuzz factor."""
248
248
249 fuzz = False
249 fuzz = False
250 if cwd:
250 if cwd:
251 args.append('-d %s' % util.shellquote(cwd))
251 args.append('-d %s' % util.shellquote(cwd))
252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
253 util.shellquote(patchname)))
253 util.shellquote(patchname)))
254
254
255 for line in fp:
255 for line in fp:
256 line = line.rstrip()
256 line = line.rstrip()
257 ui.note(line + '\n')
257 ui.note(line + '\n')
258 if line.startswith('patching file '):
258 if line.startswith('patching file '):
259 pf = util.parse_patch_output(line)
259 pf = util.parse_patch_output(line)
260 printed_file = False
260 printed_file = False
261 files.setdefault(pf, (None, None))
261 files.setdefault(pf, (None, None))
262 elif line.find('with fuzz') >= 0:
262 elif line.find('with fuzz') >= 0:
263 fuzz = True
263 fuzz = True
264 if not printed_file:
264 if not printed_file:
265 ui.warn(pf + '\n')
265 ui.warn(pf + '\n')
266 printed_file = True
266 printed_file = True
267 ui.warn(line + '\n')
267 ui.warn(line + '\n')
268 elif line.find('saving rejects to file') >= 0:
268 elif line.find('saving rejects to file') >= 0:
269 ui.warn(line + '\n')
269 ui.warn(line + '\n')
270 elif line.find('FAILED') >= 0:
270 elif line.find('FAILED') >= 0:
271 if not printed_file:
271 if not printed_file:
272 ui.warn(pf + '\n')
272 ui.warn(pf + '\n')
273 printed_file = True
273 printed_file = True
274 ui.warn(line + '\n')
274 ui.warn(line + '\n')
275 code = fp.close()
275 code = fp.close()
276 if code:
276 if code:
277 raise PatchError(_("patch command failed: %s") %
277 raise PatchError(_("patch command failed: %s") %
278 util.explain_exit(code)[0])
278 util.explain_exit(code)[0])
279 return fuzz
279 return fuzz
280
280
281 def internalpatch(patchobj, ui, strip, cwd, files={}):
281 def internalpatch(patchobj, ui, strip, cwd, files={}):
282 """use builtin patch to apply <patchobj> to the working directory.
282 """use builtin patch to apply <patchobj> to the working directory.
283 returns whether patch was applied with fuzz factor."""
283 returns whether patch was applied with fuzz factor."""
284 try:
284 try:
285 fp = file(patchobj, 'rb')
285 fp = file(patchobj, 'rb')
286 except TypeError:
286 except TypeError:
287 fp = patchobj
287 fp = patchobj
288 if cwd:
288 if cwd:
289 curdir = os.getcwd()
289 curdir = os.getcwd()
290 os.chdir(cwd)
290 os.chdir(cwd)
291 try:
291 try:
292 ret = applydiff(ui, fp, files, strip=strip)
292 ret = applydiff(ui, fp, files, strip=strip)
293 finally:
293 finally:
294 if cwd:
294 if cwd:
295 os.chdir(curdir)
295 os.chdir(curdir)
296 if ret < 0:
296 if ret < 0:
297 raise PatchError
297 raise PatchError
298 return ret > 0
298 return ret > 0
299
299
300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303
303
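The two patterns above recognise unified and context hunk headers. As a quick standalone Python 3 illustration of the same regex, unidesc captures the old start/length in groups 1 and 3 and the new start/length in groups 4 and 6; a missing length means 1.

import re

unidesc = re.compile(r'@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')

m = unidesc.match('@@ -12,5 +12,7 @@ def foo():')
starta, lena = int(m.group(1)), int(m.group(3) or 1)    # old range 12,5
startb, lenb = int(m.group(4)), int(m.group(6) or 1)    # new range 12,7
assert (starta, lena, startb, lenb) == (12, 5, 12, 7)

# a one-line hunk omits the length; read_unified_hunk treats that as 1
assert unidesc.match('@@ -3 +3 @@').group(3) is None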
304 class patchfile:
304 class patchfile:
305 def __init__(self, ui, fname, missing=False):
305 def __init__(self, ui, fname, missing=False):
306 self.fname = fname
306 self.fname = fname
307 self.ui = ui
307 self.ui = ui
308 self.lines = []
308 self.lines = []
309 self.exists = False
309 self.exists = False
310 self.missing = missing
310 self.missing = missing
311 if not missing:
311 if not missing:
312 try:
312 try:
313 fp = file(fname, 'rb')
313 fp = file(fname, 'rb')
314 self.lines = fp.readlines()
314 self.lines = fp.readlines()
315 self.exists = True
315 self.exists = True
316 except IOError:
316 except IOError:
317 pass
317 pass
318 else:
318 else:
319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
320
320
321 if not self.exists:
321 if not self.exists:
322 dirname = os.path.dirname(fname)
322 dirname = os.path.dirname(fname)
323 if dirname and not os.path.isdir(dirname):
323 if dirname and not os.path.isdir(dirname):
324 os.makedirs(dirname)
324 os.makedirs(dirname)
325
325
326 self.hash = {}
326 self.hash = {}
327 self.dirty = 0
327 self.dirty = 0
328 self.offset = 0
328 self.offset = 0
329 self.rej = []
329 self.rej = []
330 self.fileprinted = False
330 self.fileprinted = False
331 self.printfile(False)
331 self.printfile(False)
332 self.hunks = 0
332 self.hunks = 0
333
333
334 def printfile(self, warn):
334 def printfile(self, warn):
335 if self.fileprinted:
335 if self.fileprinted:
336 return
336 return
337 if warn or self.ui.verbose:
337 if warn or self.ui.verbose:
338 self.fileprinted = True
338 self.fileprinted = True
339 s = _("patching file %s\n") % self.fname
339 s = _("patching file %s\n") % self.fname
340 if warn:
340 if warn:
341 self.ui.warn(s)
341 self.ui.warn(s)
342 else:
342 else:
343 self.ui.note(s)
343 self.ui.note(s)
344
344
345
345
346 def findlines(self, l, linenum):
346 def findlines(self, l, linenum):
347 # looks through the hash and finds candidate lines. The
347 # looks through the hash and finds candidate lines. The
348 # result is a list of line numbers sorted based on distance
348 # result is a list of line numbers sorted based on distance
349 # from linenum
349 # from linenum
350 def sorter(a, b):
350 def sorter(a, b):
351 vala = abs(a - linenum)
351 vala = abs(a - linenum)
352 valb = abs(b - linenum)
352 valb = abs(b - linenum)
353 return cmp(vala, valb)
353 return cmp(vala, valb)
354
354
355 try:
355 try:
356 cand = self.hash[l]
356 cand = self.hash[l]
357 except KeyError:
357 except KeyError:
358 return []
358 return []
359
359
360 if len(cand) > 1:
360 if len(cand) > 1:
361 # re-sort our list of potential matches by distance from linenum.
361 # re-sort our list of potential matches by distance from linenum.
362 cand.sort(sorter)
362 cand.sort(sorter)
363 return cand
363 return cand
364
364
365 def hashlines(self):
365 def hashlines(self):
366 self.hash = {}
366 self.hash = {}
367 for x in xrange(len(self.lines)):
367 for x in xrange(len(self.lines)):
368 s = self.lines[x]
368 s = self.lines[x]
369 self.hash.setdefault(s, []).append(x)
369 self.hash.setdefault(s, []).append(x)
370
370
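hashlines() and findlines() above index the target file by line content so a hunk can be located near its expected position when the stated offset no longer matches. A small self-contained Python 3 sketch of that lookup; note the real class keys the dict by the raw line, newline included.

def build_index(lines):
    """Map each line to the list of positions where it occurs (cf. hashlines)."""
    index = {}
    for pos, line in enumerate(lines):
        index.setdefault(line, []).append(pos)
    return index

def candidates(index, line, around):
    """Positions of `line`, nearest to `around` first (cf. findlines)."""
    return sorted(index.get(line, []), key=lambda pos: abs(pos - around))

lines = ['a\n', 'b\n', 'a\n', 'c\n', 'a\n']
idx = build_index(lines)
assert candidates(idx, 'a\n', around=3) == [2, 4, 0]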
371 def write_rej(self):
371 def write_rej(self):
372 # our rejects are a little different from patch(1). This always
372 # our rejects are a little different from patch(1). This always
373 # creates rejects in the same form as the original patch. A file
373 # creates rejects in the same form as the original patch. A file
374 # header is inserted so that you can run the reject through patch again
374 # header is inserted so that you can run the reject through patch again
375 # without having to type the filename.
375 # without having to type the filename.
376
376
377 if not self.rej:
377 if not self.rej:
378 return
378 return
379 if self.hunks != 1:
379 if self.hunks != 1:
380 hunkstr = "s"
380 hunkstr = "s"
381 else:
381 else:
382 hunkstr = ""
382 hunkstr = ""
383
383
384 fname = self.fname + ".rej"
384 fname = self.fname + ".rej"
385 self.ui.warn(
385 self.ui.warn(
386 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
386 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
387 (len(self.rej), self.hunks, hunkstr, fname))
387 (len(self.rej), self.hunks, hunkstr, fname))
388 try: os.unlink(fname)
388 try: os.unlink(fname)
389 except OSError:
389 except OSError:
390 pass
390 pass
391 fp = file(fname, 'wb')
391 fp = file(fname, 'wb')
392 base = os.path.basename(self.fname)
392 base = os.path.basename(self.fname)
393 fp.write("--- %s\n+++ %s\n" % (base, base))
393 fp.write("--- %s\n+++ %s\n" % (base, base))
394 for x in self.rej:
394 for x in self.rej:
395 for l in x.hunk:
395 for l in x.hunk:
396 fp.write(l)
396 fp.write(l)
397 if l[-1] != '\n':
397 if l[-1] != '\n':
398 fp.write("\n\ No newline at end of file\n")
398 fp.write("\n\ No newline at end of file\n")
399
399
400 def write(self, dest=None):
400 def write(self, dest=None):
401 if self.dirty:
401 if self.dirty:
402 if not dest:
402 if not dest:
403 dest = self.fname
403 dest = self.fname
404 st = None
404 st = None
405 try:
405 try:
406 st = os.lstat(dest)
406 st = os.lstat(dest)
407 except OSError, inst:
407 except OSError, inst:
408 if inst.errno != errno.ENOENT:
408 if inst.errno != errno.ENOENT:
409 raise
409 raise
410 if st and st.st_nlink > 1:
410 if st and st.st_nlink > 1:
411 os.unlink(dest)
411 os.unlink(dest)
412 fp = file(dest, 'wb')
412 fp = file(dest, 'wb')
413 if st and st.st_nlink > 1:
413 if st and st.st_nlink > 1:
414 os.chmod(dest, st.st_mode)
414 os.chmod(dest, st.st_mode)
415 fp.writelines(self.lines)
415 fp.writelines(self.lines)
416 fp.close()
416 fp.close()
417
417
418 def close(self):
418 def close(self):
419 self.write()
419 self.write()
420 self.write_rej()
420 self.write_rej()
421
421
422 def apply(self, h, reverse):
422 def apply(self, h, reverse):
423 if not h.complete():
423 if not h.complete():
424 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
424 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
425 (h.number, h.desc, len(h.a), h.lena, len(h.b),
425 (h.number, h.desc, len(h.a), h.lena, len(h.b),
426 h.lenb))
426 h.lenb))
427
427
428 self.hunks += 1
428 self.hunks += 1
429 if reverse:
429 if reverse:
430 h.reverse()
430 h.reverse()
431
431
432 if self.missing:
432 if self.missing:
433 self.rej.append(h)
433 self.rej.append(h)
434 return -1
434 return -1
435
435
436 if self.exists and h.createfile():
436 if self.exists and h.createfile():
437 self.ui.warn(_("file %s already exists\n") % self.fname)
437 self.ui.warn(_("file %s already exists\n") % self.fname)
438 self.rej.append(h)
438 self.rej.append(h)
439 return -1
439 return -1
440
440
441 if isinstance(h, binhunk):
441 if isinstance(h, binhunk):
442 if h.rmfile():
442 if h.rmfile():
443 os.unlink(self.fname)
443 os.unlink(self.fname)
444 else:
444 else:
445 self.lines[:] = h.new()
445 self.lines[:] = h.new()
446 self.offset += len(h.new())
446 self.offset += len(h.new())
447 self.dirty = 1
447 self.dirty = 1
448 return 0
448 return 0
449
449
450 # fast case first, no offsets, no fuzz
450 # fast case first, no offsets, no fuzz
451 old = h.old()
451 old = h.old()
452 # patch starts counting at 1 unless we are adding the file
452 # patch starts counting at 1 unless we are adding the file
453 if h.starta == 0:
453 if h.starta == 0:
454 start = 0
454 start = 0
455 else:
455 else:
456 start = h.starta + self.offset - 1
456 start = h.starta + self.offset - 1
457 orig_start = start
457 orig_start = start
458 if diffhelpers.testhunk(old, self.lines, start) == 0:
458 if diffhelpers.testhunk(old, self.lines, start) == 0:
459 if h.rmfile():
459 if h.rmfile():
460 os.unlink(self.fname)
460 os.unlink(self.fname)
461 else:
461 else:
462 self.lines[start : start + h.lena] = h.new()
462 self.lines[start : start + h.lena] = h.new()
463 self.offset += h.lenb - h.lena
463 self.offset += h.lenb - h.lena
464 self.dirty = 1
464 self.dirty = 1
465 return 0
465 return 0
466
466
467 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
467 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
468 self.hashlines()
468 self.hashlines()
469 if h.hunk[-1][0] != ' ':
469 if h.hunk[-1][0] != ' ':
470 # if the hunk tried to put something at the bottom of the file
470 # if the hunk tried to put something at the bottom of the file
471 # override the start line and use eof here
471 # override the start line and use eof here
472 search_start = len(self.lines)
472 search_start = len(self.lines)
473 else:
473 else:
474 search_start = orig_start
474 search_start = orig_start
475
475
476 for fuzzlen in xrange(3):
476 for fuzzlen in xrange(3):
477 for toponly in [ True, False ]:
477 for toponly in [ True, False ]:
478 old = h.old(fuzzlen, toponly)
478 old = h.old(fuzzlen, toponly)
479
479
480 cand = self.findlines(old[0][1:], search_start)
480 cand = self.findlines(old[0][1:], search_start)
481 for l in cand:
481 for l in cand:
482 if diffhelpers.testhunk(old, self.lines, l) == 0:
482 if diffhelpers.testhunk(old, self.lines, l) == 0:
483 newlines = h.new(fuzzlen, toponly)
483 newlines = h.new(fuzzlen, toponly)
484 self.lines[l : l + len(old)] = newlines
484 self.lines[l : l + len(old)] = newlines
485 self.offset += len(newlines) - len(old)
485 self.offset += len(newlines) - len(old)
486 self.dirty = 1
486 self.dirty = 1
487 if fuzzlen:
487 if fuzzlen:
488 fuzzstr = "with fuzz %d " % fuzzlen
488 fuzzstr = "with fuzz %d " % fuzzlen
489 f = self.ui.warn
489 f = self.ui.warn
490 self.printfile(True)
490 self.printfile(True)
491 else:
491 else:
492 fuzzstr = ""
492 fuzzstr = ""
493 f = self.ui.note
493 f = self.ui.note
494 offset = l - orig_start - fuzzlen
494 offset = l - orig_start - fuzzlen
495 if offset == 1:
495 if offset == 1:
496 linestr = "line"
496 linestr = "line"
497 else:
497 else:
498 linestr = "lines"
498 linestr = "lines"
499 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
499 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
500 (h.number, l+1, fuzzstr, offset, linestr))
500 (h.number, l+1, fuzzstr, offset, linestr))
501 return fuzzlen
501 return fuzzlen
502 self.printfile(True)
502 self.printfile(True)
503 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
503 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
504 self.rej.append(h)
504 self.rej.append(h)
505 return -1
505 return -1
506
506
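patchfile.apply above tries a hunk at its stated offset first, then retries with growing fuzz at every candidate position returned by findlines. Below is a compact standalone Python 3 sketch of that search order; unlike the real fuzzit, which only ever strips context lines, this toy version trims unconditionally.

def find_hunk_position(old, lines, expected, fuzz_levels=3):
    """Locate `old` (a list of lines) inside `lines`: the stated position
    first, then increasing fuzz, trimming the top before trimming both ends."""
    def matches(needle, pos):
        return lines[pos:pos + len(needle)] == needle

    if 0 <= expected <= len(lines) and matches(old, expected):
        return expected, 0
    for fuzz in range(fuzz_levels):
        for toponly in (True, False):
            trimmed = old[fuzz:] if toponly else old[fuzz:len(old) - fuzz]
            if not trimmed:
                continue
            for pos in range(len(lines)):        # stand-in for findlines()
                if matches(trimmed, pos):
                    return pos, fuzz
    return None

lines = ['a\n', 'b\n', 'c\n', 'd\n']
assert find_hunk_position(['b\n', 'c\n'], lines, expected=1) == (1, 0)
assert find_hunk_position(['x\n', 'c\n', 'd\n'], lines, expected=0) == (2, 1)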
507 class hunk:
507 class hunk:
508 def __init__(self, desc, num, lr, context, gitpatch=None):
508 def __init__(self, desc, num, lr, context, gitpatch=None):
509 self.number = num
509 self.number = num
510 self.desc = desc
510 self.desc = desc
511 self.hunk = [ desc ]
511 self.hunk = [ desc ]
512 self.a = []
512 self.a = []
513 self.b = []
513 self.b = []
514 if context:
514 if context:
515 self.read_context_hunk(lr)
515 self.read_context_hunk(lr)
516 else:
516 else:
517 self.read_unified_hunk(lr)
517 self.read_unified_hunk(lr)
518 self.gitpatch = gitpatch
518 self.gitpatch = gitpatch
519
519
520 def read_unified_hunk(self, lr):
520 def read_unified_hunk(self, lr):
521 m = unidesc.match(self.desc)
521 m = unidesc.match(self.desc)
522 if not m:
522 if not m:
523 raise PatchError(_("bad hunk #%d") % self.number)
523 raise PatchError(_("bad hunk #%d") % self.number)
524 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
524 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
525 if self.lena == None:
525 if self.lena == None:
526 self.lena = 1
526 self.lena = 1
527 else:
527 else:
528 self.lena = int(self.lena)
528 self.lena = int(self.lena)
529 if self.lenb == None:
529 if self.lenb == None:
530 self.lenb = 1
530 self.lenb = 1
531 else:
531 else:
532 self.lenb = int(self.lenb)
532 self.lenb = int(self.lenb)
533 self.starta = int(self.starta)
533 self.starta = int(self.starta)
534 self.startb = int(self.startb)
534 self.startb = int(self.startb)
535 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
535 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
536 # if we hit eof before finishing out the hunk, the last line will
536 # if we hit eof before finishing out the hunk, the last line will
537 # be zero length. Let's try to fix it up.
537 # be zero length. Let's try to fix it up.
538 while len(self.hunk[-1]) == 0:
538 while len(self.hunk[-1]) == 0:
539 del self.hunk[-1]
539 del self.hunk[-1]
540 del self.a[-1]
540 del self.a[-1]
541 del self.b[-1]
541 del self.b[-1]
542 self.lena -= 1
542 self.lena -= 1
543 self.lenb -= 1
543 self.lenb -= 1
544
544
545 def read_context_hunk(self, lr):
545 def read_context_hunk(self, lr):
546 self.desc = lr.readline()
546 self.desc = lr.readline()
547 m = contextdesc.match(self.desc)
547 m = contextdesc.match(self.desc)
548 if not m:
548 if not m:
549 raise PatchError(_("bad hunk #%d") % self.number)
549 raise PatchError(_("bad hunk #%d") % self.number)
550 foo, self.starta, foo2, aend, foo3 = m.groups()
550 foo, self.starta, foo2, aend, foo3 = m.groups()
551 self.starta = int(self.starta)
551 self.starta = int(self.starta)
552 if aend == None:
552 if aend == None:
553 aend = self.starta
553 aend = self.starta
554 self.lena = int(aend) - self.starta
554 self.lena = int(aend) - self.starta
555 if self.starta:
555 if self.starta:
556 self.lena += 1
556 self.lena += 1
557 for x in xrange(self.lena):
557 for x in xrange(self.lena):
558 l = lr.readline()
558 l = lr.readline()
559 if l.startswith('---'):
559 if l.startswith('---'):
560 lr.push(l)
560 lr.push(l)
561 break
561 break
562 s = l[2:]
562 s = l[2:]
563 if l.startswith('- ') or l.startswith('! '):
563 if l.startswith('- ') or l.startswith('! '):
564 u = '-' + s
564 u = '-' + s
565 elif l.startswith(' '):
565 elif l.startswith(' '):
566 u = ' ' + s
566 u = ' ' + s
567 else:
567 else:
568 raise PatchError(_("bad hunk #%d old text line %d") %
568 raise PatchError(_("bad hunk #%d old text line %d") %
569 (self.number, x))
569 (self.number, x))
570 self.a.append(u)
570 self.a.append(u)
571 self.hunk.append(u)
571 self.hunk.append(u)
572
572
573 l = lr.readline()
573 l = lr.readline()
574 if l.startswith('\ '):
574 if l.startswith('\ '):
575 s = self.a[-1][:-1]
575 s = self.a[-1][:-1]
576 self.a[-1] = s
576 self.a[-1] = s
577 self.hunk[-1] = s
577 self.hunk[-1] = s
578 l = lr.readline()
578 l = lr.readline()
579 m = contextdesc.match(l)
579 m = contextdesc.match(l)
580 if not m:
580 if not m:
581 raise PatchError(_("bad hunk #%d") % self.number)
581 raise PatchError(_("bad hunk #%d") % self.number)
582 foo, self.startb, foo2, bend, foo3 = m.groups()
582 foo, self.startb, foo2, bend, foo3 = m.groups()
583 self.startb = int(self.startb)
583 self.startb = int(self.startb)
584 if bend == None:
584 if bend == None:
585 bend = self.startb
585 bend = self.startb
586 self.lenb = int(bend) - self.startb
586 self.lenb = int(bend) - self.startb
587 if self.startb:
587 if self.startb:
588 self.lenb += 1
588 self.lenb += 1
589 hunki = 1
589 hunki = 1
590 for x in xrange(self.lenb):
590 for x in xrange(self.lenb):
591 l = lr.readline()
591 l = lr.readline()
592 if l.startswith('\ '):
592 if l.startswith('\ '):
593 s = self.b[-1][:-1]
593 s = self.b[-1][:-1]
594 self.b[-1] = s
594 self.b[-1] = s
595 self.hunk[hunki-1] = s
595 self.hunk[hunki-1] = s
596 continue
596 continue
597 if not l:
597 if not l:
598 lr.push(l)
598 lr.push(l)
599 break
599 break
600 s = l[2:]
600 s = l[2:]
601 if l.startswith('+ ') or l.startswith('! '):
601 if l.startswith('+ ') or l.startswith('! '):
602 u = '+' + s
602 u = '+' + s
603 elif l.startswith(' '):
603 elif l.startswith(' '):
604 u = ' ' + s
604 u = ' ' + s
605 elif len(self.b) == 0:
605 elif len(self.b) == 0:
606 # this can happen when the hunk does not add any lines
606 # this can happen when the hunk does not add any lines
607 lr.push(l)
607 lr.push(l)
608 break
608 break
609 else:
609 else:
610 raise PatchError(_("bad hunk #%d new text line %d") %
610 raise PatchError(_("bad hunk #%d new text line %d") %
611 (self.number, x))
611 (self.number, x))
612 self.b.append(s)
612 self.b.append(s)
613 while True:
613 while True:
614 if hunki >= len(self.hunk):
614 if hunki >= len(self.hunk):
615 h = ""
615 h = ""
616 else:
616 else:
617 h = self.hunk[hunki]
617 h = self.hunk[hunki]
618 hunki += 1
618 hunki += 1
619 if h == u:
619 if h == u:
620 break
620 break
621 elif h.startswith('-'):
621 elif h.startswith('-'):
622 continue
622 continue
623 else:
623 else:
624 self.hunk.insert(hunki-1, u)
624 self.hunk.insert(hunki-1, u)
625 break
625 break
626
626
627 if not self.a:
627 if not self.a:
628 # this happens when lines were only added to the hunk
628 # this happens when lines were only added to the hunk
629 for x in self.hunk:
629 for x in self.hunk:
630 if x.startswith('-') or x.startswith(' '):
630 if x.startswith('-') or x.startswith(' '):
631 self.a.append(x)
631 self.a.append(x)
632 if not self.b:
632 if not self.b:
633 # this happens when lines were only deleted from the hunk
633 # this happens when lines were only deleted from the hunk
634 for x in self.hunk:
634 for x in self.hunk:
635 if x.startswith('+') or x.startswith(' '):
635 if x.startswith('+') or x.startswith(' '):
636 self.b.append(x[1:])
636 self.b.append(x[1:])
637 # @@ -start,len +start,len @@
637 # @@ -start,len +start,len @@
638 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
638 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
639 self.startb, self.lenb)
639 self.startb, self.lenb)
640 self.hunk[0] = self.desc
640 self.hunk[0] = self.desc
641
641
642 def reverse(self):
642 def reverse(self):
643 origlena = self.lena
643 origlena = self.lena
644 origstarta = self.starta
644 origstarta = self.starta
645 self.lena = self.lenb
645 self.lena = self.lenb
646 self.starta = self.startb
646 self.starta = self.startb
647 self.lenb = origlena
647 self.lenb = origlena
648 self.startb = origstarta
648 self.startb = origstarta
649 self.a = []
649 self.a = []
650 self.b = []
650 self.b = []
651 # self.hunk[0] is the @@ description
651 # self.hunk[0] is the @@ description
652 for x in xrange(1, len(self.hunk)):
652 for x in xrange(1, len(self.hunk)):
653 o = self.hunk[x]
653 o = self.hunk[x]
654 if o.startswith('-'):
654 if o.startswith('-'):
655 n = '+' + o[1:]
655 n = '+' + o[1:]
656 self.b.append(o[1:])
656 self.b.append(o[1:])
657 elif o.startswith('+'):
657 elif o.startswith('+'):
658 n = '-' + o[1:]
658 n = '-' + o[1:]
659 self.a.append(n)
659 self.a.append(n)
660 else:
660 else:
661 n = o
661 n = o
662 self.b.append(o[1:])
662 self.b.append(o[1:])
663 self.a.append(o)
663 self.a.append(o)
664 self.hunk[x] = n
664 self.hunk[x] = n
665
665
666 def fix_newline(self):
666 def fix_newline(self):
667 diffhelpers.fix_newline(self.hunk, self.a, self.b)
667 diffhelpers.fix_newline(self.hunk, self.a, self.b)
668
668
669 def complete(self):
669 def complete(self):
670 return len(self.a) == self.lena and len(self.b) == self.lenb
670 return len(self.a) == self.lena and len(self.b) == self.lenb
671
671
672 def createfile(self):
672 def createfile(self):
673 create = self.gitpatch is None or self.gitpatch.op == 'ADD'
673 create = self.gitpatch is None or self.gitpatch.op == 'ADD'
674 return self.starta == 0 and self.lena == 0 and create
674 return self.starta == 0 and self.lena == 0 and create
675
675
676 def rmfile(self):
676 def rmfile(self):
677 remove = self.gitpatch is None or self.gitpatch.op == 'DELETE'
677 remove = self.gitpatch is None or self.gitpatch.op == 'DELETE'
678 return self.startb == 0 and self.lenb == 0 and remove
678 return self.startb == 0 and self.lenb == 0 and remove
679
679
680 def fuzzit(self, l, fuzz, toponly):
680 def fuzzit(self, l, fuzz, toponly):
681 # this removes context lines from the top and bottom of list 'l'. It
681 # this removes context lines from the top and bottom of list 'l'. It
682 # checks the hunk to make sure only context lines are removed, and then
682 # checks the hunk to make sure only context lines are removed, and then
683 # returns a new shortened list of lines.
683 # returns a new shortened list of lines.
684 fuzz = min(fuzz, len(l)-1)
684 fuzz = min(fuzz, len(l)-1)
685 if fuzz:
685 if fuzz:
686 top = 0
686 top = 0
687 bot = 0
687 bot = 0
688 hlen = len(self.hunk)
688 hlen = len(self.hunk)
689 for x in xrange(hlen-1):
689 for x in xrange(hlen-1):
690 # the hunk starts with the @@ line, so use x+1
690 # the hunk starts with the @@ line, so use x+1
691 if self.hunk[x+1][0] == ' ':
691 if self.hunk[x+1][0] == ' ':
692 top += 1
692 top += 1
693 else:
693 else:
694 break
694 break
695 if not toponly:
695 if not toponly:
696 for x in xrange(hlen-1):
696 for x in xrange(hlen-1):
697 if self.hunk[hlen-bot-1][0] == ' ':
697 if self.hunk[hlen-bot-1][0] == ' ':
698 bot += 1
698 bot += 1
699 else:
699 else:
700 break
700 break
701
701
702 # top and bot now count context in the hunk
702 # top and bot now count context in the hunk
703 # adjust them if either one is short
703 # adjust them if either one is short
704 context = max(top, bot, 3)
704 context = max(top, bot, 3)
705 if bot < context:
705 if bot < context:
706 bot = max(0, fuzz - (context - bot))
706 bot = max(0, fuzz - (context - bot))
707 else:
707 else:
708 bot = min(fuzz, bot)
708 bot = min(fuzz, bot)
709 if top < context:
709 if top < context:
710 top = max(0, fuzz - (context - top))
710 top = max(0, fuzz - (context - top))
711 else:
711 else:
712 top = min(fuzz, top)
712 top = min(fuzz, top)
713
713
714 return l[top:len(l)-bot]
714 return l[top:len(l)-bot]
715 return l
715 return l
716
716
717 def old(self, fuzz=0, toponly=False):
717 def old(self, fuzz=0, toponly=False):
718 return self.fuzzit(self.a, fuzz, toponly)
718 return self.fuzzit(self.a, fuzz, toponly)
719
719
720 def newctrl(self):
720 def newctrl(self):
721 res = []
721 res = []
722 for x in self.hunk:
722 for x in self.hunk:
723 c = x[0]
723 c = x[0]
724 if c == ' ' or c == '+':
724 if c == ' ' or c == '+':
725 res.append(x)
725 res.append(x)
726 return res
726 return res
727
727
728 def new(self, fuzz=0, toponly=False):
728 def new(self, fuzz=0, toponly=False):
729 return self.fuzzit(self.b, fuzz, toponly)
729 return self.fuzzit(self.b, fuzz, toponly)
730
730
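old() and new() above project a hunk body onto the text being replaced and its replacement, and fuzzit() shaves context off the ends when fuzzy matching is allowed. A standalone illustration of that projection, using a made-up hunk body without its @@ header line:

# ' ' = context, '-' = removed, '+' = added
hunk_body = [' import os\n',
             '-import sha\n',
             '+import hashlib\n',
             ' import zlib\n']

# self.a keeps the leading marker (findlines strips it with old[0][1:]),
# while self.b stores the bare replacement lines -- cf. read_context_hunk.
a = [l for l in hunk_body if l[0] in ' -']
b = [l[1:] for l in hunk_body if l[0] in ' +']

assert a == [' import os\n', '-import sha\n', ' import zlib\n']
assert b == ['import os\n', 'import hashlib\n', 'import zlib\n']

# With fuzz, fuzzit() may shave context off the ends of these lists before
# matching; trimming one line from each end here leaves only the change:
assert a[1:-1] == ['-import sha\n']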
731 class binhunk:
731 class binhunk:
732 'A binary patch file. Only understands literals so far.'
732 'A binary patch file. Only understands literals so far.'
733 def __init__(self, gitpatch):
733 def __init__(self, gitpatch):
734 self.gitpatch = gitpatch
734 self.gitpatch = gitpatch
735 self.text = None
735 self.text = None
736 self.hunk = ['GIT binary patch\n']
736 self.hunk = ['GIT binary patch\n']
737
737
738 def createfile(self):
738 def createfile(self):
739 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
739 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
740
740
741 def rmfile(self):
741 def rmfile(self):
742 return self.gitpatch.op == 'DELETE'
742 return self.gitpatch.op == 'DELETE'
743
743
744 def complete(self):
744 def complete(self):
745 return self.text is not None
745 return self.text is not None
746
746
747 def new(self):
747 def new(self):
748 return [self.text]
748 return [self.text]
749
749
750 def extract(self, fp):
750 def extract(self, fp):
751 line = fp.readline()
751 line = fp.readline()
752 self.hunk.append(line)
752 self.hunk.append(line)
753 while line and not line.startswith('literal '):
753 while line and not line.startswith('literal '):
754 line = fp.readline()
754 line = fp.readline()
755 self.hunk.append(line)
755 self.hunk.append(line)
756 if not line:
756 if not line:
757 raise PatchError(_('could not extract binary patch'))
757 raise PatchError(_('could not extract binary patch'))
758 size = int(line[8:].rstrip())
758 size = int(line[8:].rstrip())
759 dec = []
759 dec = []
760 line = fp.readline()
760 line = fp.readline()
761 self.hunk.append(line)
761 self.hunk.append(line)
762 while len(line) > 1:
762 while len(line) > 1:
763 l = line[0]
763 l = line[0]
764 if l <= 'Z' and l >= 'A':
764 if l <= 'Z' and l >= 'A':
765 l = ord(l) - ord('A') + 1
765 l = ord(l) - ord('A') + 1
766 else:
766 else:
767 l = ord(l) - ord('a') + 27
767 l = ord(l) - ord('a') + 27
768 dec.append(base85.b85decode(line[1:-1])[:l])
768 dec.append(base85.b85decode(line[1:-1])[:l])
769 line = fp.readline()
769 line = fp.readline()
770 self.hunk.append(line)
770 self.hunk.append(line)
771 text = zlib.decompress(''.join(dec))
771 text = zlib.decompress(''.join(dec))
772 if len(text) != size:
772 if len(text) != size:
773 raise PatchError(_('binary patch is %d bytes, not %d') %
773 raise PatchError(_('binary patch is %d bytes, not %d') %
774 (len(text), size))
774 (len(text), size))
775 self.text = text
775 self.text = text
776
776
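binhunk.extract above reads a git binary patch: a 'literal <size>' header, then base85 lines whose first character encodes the decoded length of that line (A-Z for 1-26, a-z for 27-52), the concatenated payload being zlib-compressed. A standalone Python 3 round trip of that format, using base64.b85encode/b85decode, which use the same alphabet as git-style binary diffs:

import base64, zlib

def encode_literal(data, linesize=52):
    """Emit 'literal' lines in the git binary patch style (cf. b85diff below)."""
    out = ['literal %d\n' % len(data)]
    comp = zlib.compress(data)
    for i in range(0, len(comp), linesize):
        chunk = comp[i:i + linesize]
        n = len(chunk)
        tag = chr(ord('A') + n - 1) if n <= 26 else chr(ord('a') + n - 27)
        out.append(tag + base64.b85encode(chunk, pad=True).decode('ascii') + '\n')
    out.append('\n')
    return out

def decode_literal(lines):
    """Inverse of encode_literal; mirrors the loop in binhunk.extract."""
    size = int(lines[0].split()[1])
    dec = []
    for line in lines[1:]:
        if len(line) <= 1:              # a blank line ends the hunk
            break
        tag = line[0]
        n = (ord(tag) - ord('A') + 1) if 'A' <= tag <= 'Z' else (ord(tag) - ord('a') + 27)
        dec.append(base64.b85decode(line[1:-1])[:n])
    text = zlib.decompress(b''.join(dec))
    if len(text) != size:
        raise ValueError('binary patch is %d bytes, not %d' % (len(text), size))
    return text

payload = bytes(range(256)) * 4
assert decode_literal(encode_literal(payload)) == payload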
777 def parsefilename(str):
777 def parsefilename(str):
778 # --- filename \t|space stuff
778 # --- filename \t|space stuff
779 s = str[4:].rstrip('\r\n')
779 s = str[4:].rstrip('\r\n')
780 i = s.find('\t')
780 i = s.find('\t')
781 if i < 0:
781 if i < 0:
782 i = s.find(' ')
782 i = s.find(' ')
783 if i < 0:
783 if i < 0:
784 return s
784 return s
785 return s[:i]
785 return s[:i]
786
786
787 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
787 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
788 def pathstrip(path, count=1):
788 def pathstrip(path, count=1):
789 pathlen = len(path)
789 pathlen = len(path)
790 i = 0
790 i = 0
791 if count == 0:
791 if count == 0:
792 return path.rstrip()
792 return path.rstrip()
793 while count > 0:
793 while count > 0:
794 i = path.find('/', i)
794 i = path.find('/', i)
795 if i == -1:
795 if i == -1:
796 raise PatchError(_("unable to strip away %d dirs from %s") %
796 raise PatchError(_("unable to strip away %d dirs from %s") %
797 (count, path))
797 (count, path))
798 i += 1
798 i += 1
799 # consume '//' in the path
799 # consume '//' in the path
800 while i < pathlen - 1 and path[i] == '/':
800 while i < pathlen - 1 and path[i] == '/':
801 i += 1
801 i += 1
802 count -= 1
802 count -= 1
803 return path[i:].rstrip()
803 return path[i:].rstrip()
804
804
805 nulla = afile_orig == "/dev/null"
805 nulla = afile_orig == "/dev/null"
806 nullb = bfile_orig == "/dev/null"
806 nullb = bfile_orig == "/dev/null"
807 afile = pathstrip(afile_orig, strip)
807 afile = pathstrip(afile_orig, strip)
808 gooda = not nulla and os.path.exists(afile)
808 gooda = not nulla and os.path.exists(afile)
809 bfile = pathstrip(bfile_orig, strip)
809 bfile = pathstrip(bfile_orig, strip)
810 if afile == bfile:
810 if afile == bfile:
811 goodb = gooda
811 goodb = gooda
812 else:
812 else:
813 goodb = not nullb and os.path.exists(bfile)
813 goodb = not nullb and os.path.exists(bfile)
814 createfunc = hunk.createfile
814 createfunc = hunk.createfile
815 if reverse:
815 if reverse:
816 createfunc = hunk.rmfile
816 createfunc = hunk.rmfile
817 missing = not goodb and not gooda and not createfunc()
817 missing = not goodb and not gooda and not createfunc()
818 fname = None
818 fname = None
819 if not missing:
819 if not missing:
820 if gooda and goodb:
820 if gooda and goodb:
821 fname = (afile in bfile) and afile or bfile
821 fname = (afile in bfile) and afile or bfile
822 elif gooda:
822 elif gooda:
823 fname = afile
823 fname = afile
824
824
825 if not fname:
825 if not fname:
826 if not nullb:
826 if not nullb:
827 fname = (afile in bfile) and afile or bfile
827 fname = (afile in bfile) and afile or bfile
828 elif not nulla:
828 elif not nulla:
829 fname = afile
829 fname = afile
830 else:
830 else:
831 raise PatchError(_("undefined source and destination files"))
831 raise PatchError(_("undefined source and destination files"))
832
832
833 return fname, missing
833 return fname, missing
834
834
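selectfile above picks the working-directory file a hunk applies to; its nested pathstrip implements patch(1)'s -p semantics, dropping leading path components and swallowing doubled slashes. A standalone Python 3 sketch with the same behaviour (ValueError stands in for PatchError):

def pathstrip(path, count=1):
    """Strip `count` leading components from a diff path, patch(1) -p style."""
    if count == 0:
        return path.rstrip()
    i = 0
    while count > 0:
        i = path.find('/', i)
        if i == -1:
            raise ValueError('cannot strip %d dirs from %s' % (count, path))
        i += 1
        while i < len(path) - 1 and path[i] == '/':   # swallow '//'
            i += 1
        count -= 1
    return path[i:].rstrip()

assert pathstrip('a/b/c.py') == 'b/c.py'
assert pathstrip('a//b/c.py', 2) == 'c.py'
assert pathstrip('a/b/c.py', 0) == 'a/b/c.py'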
835 class linereader:
835 class linereader:
836 # simple class to allow pushing lines back into the input stream
836 # simple class to allow pushing lines back into the input stream
837 def __init__(self, fp):
837 def __init__(self, fp):
838 self.fp = fp
838 self.fp = fp
839 self.buf = []
839 self.buf = []
840
840
841 def push(self, line):
841 def push(self, line):
842 self.buf.append(line)
842 self.buf.append(line)
843
843
844 def readline(self):
844 def readline(self):
845 if self.buf:
845 if self.buf:
846 l = self.buf[0]
846 l = self.buf[0]
847 del self.buf[0]
847 del self.buf[0]
848 return l
848 return l
849 return self.fp.readline()
849 return self.fp.readline()
850
850
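linereader gives the parser one line of pushback: read a line, decide it belongs to the next section, and put it back. A minimal Python 3 sketch of the same idea over an in-memory stream:

import io

class LineReader:
    """File-like reader that lets a parser un-read lines it over-consumed."""
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        self.buf.append(line)

    def readline(self):
        return self.buf.pop(0) if self.buf else self.fp.readline()

lr = LineReader(io.StringIO('--- a/foo\n+++ b/foo\n@@ -1 +1 @@\n'))
first = lr.readline()          # peek at the header line
lr.push(first)                 # not ours after all; put it back
assert lr.readline() == '--- a/foo\n'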
851 def iterhunks(ui, fp, sourcefile=None):
851 def iterhunks(ui, fp, sourcefile=None):
852 """Read a patch and yield the following events:
852 """Read a patch and yield the following events:
853 - ("file", afile, bfile, firsthunk): select a new target file.
853 - ("file", afile, bfile, firsthunk): select a new target file.
854 - ("hunk", hunk): a new hunk is ready to be applied, follows a
854 - ("hunk", hunk): a new hunk is ready to be applied, follows a
855 "file" event.
855 "file" event.
856 - ("git", gitchanges): current diff is in git format, gitchanges
856 - ("git", gitchanges): current diff is in git format, gitchanges
857 maps filenames to gitpatch records. Unique event.
857 maps filenames to gitpatch records. Unique event.
858 """
858 """
859
859
860 def scangitpatch(fp, firstline):
860 def scangitpatch(fp, firstline):
861 '''git patches can modify a file, then copy that file to
861 '''git patches can modify a file, then copy that file to
862 a new file, but expect the source to be the unmodified form.
862 a new file, but expect the source to be the unmodified form.
863 So we scan the patch looking for that case so we can do
863 So we scan the patch looking for that case so we can do
864 the copies ahead of time.'''
864 the copies ahead of time.'''
865
865
866 pos = 0
866 pos = 0
867 try:
867 try:
868 pos = fp.tell()
868 pos = fp.tell()
869 except IOError:
869 except IOError:
870 fp = cStringIO.StringIO(fp.read())
870 fp = cStringIO.StringIO(fp.read())
871
871
872 (dopatch, gitpatches) = readgitpatch(fp, firstline)
872 (dopatch, gitpatches) = readgitpatch(fp, firstline)
873 fp.seek(pos)
873 fp.seek(pos)
874
874
875 return fp, dopatch, gitpatches
875 return fp, dopatch, gitpatches
876
876
877 changed = {}
877 changed = {}
878 current_hunk = None
878 current_hunk = None
879 afile = ""
879 afile = ""
880 bfile = ""
880 bfile = ""
881 state = None
881 state = None
882 hunknum = 0
882 hunknum = 0
883 emitfile = False
883 emitfile = False
884
884
885 git = False
885 git = False
886 gitre = re.compile('diff --git (a/.*) (b/.*)')
886 gitre = re.compile('diff --git (a/.*) (b/.*)')
887
887
888 # our states
888 # our states
889 BFILE = 1
889 BFILE = 1
890 context = None
890 context = None
891 lr = linereader(fp)
891 lr = linereader(fp)
892 dopatch = True
892 dopatch = True
893 # gitworkdone is True if a git operation (copy, rename, ...) was
893 # gitworkdone is True if a git operation (copy, rename, ...) was
894 # performed already for the current file. Useful when the file
894 # performed already for the current file. Useful when the file
895 # section may have no hunk.
895 # section may have no hunk.
896 gitworkdone = False
896 gitworkdone = False
897
897
898 while True:
898 while True:
899 newfile = False
899 newfile = False
900 x = lr.readline()
900 x = lr.readline()
901 if not x:
901 if not x:
902 break
902 break
903 if current_hunk:
903 if current_hunk:
904 if x.startswith('\ '):
904 if x.startswith('\ '):
905 current_hunk.fix_newline()
905 current_hunk.fix_newline()
906 yield 'hunk', current_hunk
906 yield 'hunk', current_hunk
907 current_hunk = None
907 current_hunk = None
908 gitworkdone = False
908 gitworkdone = False
909 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
909 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
910 ((context or context == None) and x.startswith('***************')))):
910 ((context or context == None) and x.startswith('***************')))):
911 try:
911 try:
912 if context == None and x.startswith('***************'):
912 if context == None and x.startswith('***************'):
913 context = True
913 context = True
914 gpatch = changed.get(bfile[2:], (None, None))[1]
914 gpatch = changed.get(bfile[2:], (None, None))[1]
915 current_hunk = hunk(x, hunknum + 1, lr, context, gpatch)
915 current_hunk = hunk(x, hunknum + 1, lr, context, gpatch)
916 except PatchError, err:
916 except PatchError, err:
917 ui.debug(err)
917 ui.debug(err)
918 current_hunk = None
918 current_hunk = None
919 continue
919 continue
920 hunknum += 1
920 hunknum += 1
921 if emitfile:
921 if emitfile:
922 emitfile = False
922 emitfile = False
923 yield 'file', (afile, bfile, current_hunk)
923 yield 'file', (afile, bfile, current_hunk)
924 elif state == BFILE and x.startswith('GIT binary patch'):
924 elif state == BFILE and x.startswith('GIT binary patch'):
925 current_hunk = binhunk(changed[bfile[2:]][1])
925 current_hunk = binhunk(changed[bfile[2:]][1])
926 hunknum += 1
926 hunknum += 1
927 if emitfile:
927 if emitfile:
928 emitfile = False
928 emitfile = False
929 yield 'file', (afile, bfile, current_hunk)
929 yield 'file', (afile, bfile, current_hunk)
930 current_hunk.extract(fp)
930 current_hunk.extract(fp)
931 elif x.startswith('diff --git'):
931 elif x.startswith('diff --git'):
932 # check for git diff, scanning the whole patch file if needed
932 # check for git diff, scanning the whole patch file if needed
933 m = gitre.match(x)
933 m = gitre.match(x)
934 if m:
934 if m:
935 afile, bfile = m.group(1, 2)
935 afile, bfile = m.group(1, 2)
936 if not git:
936 if not git:
937 git = True
937 git = True
938 fp, dopatch, gitpatches = scangitpatch(fp, x)
938 fp, dopatch, gitpatches = scangitpatch(fp, x)
939 yield 'git', gitpatches
939 yield 'git', gitpatches
940 for gp in gitpatches:
940 for gp in gitpatches:
941 changed[gp.path] = (gp.op, gp)
941 changed[gp.path] = (gp.op, gp)
942 # else error?
942 # else error?
943 # copy/rename + modify should modify target, not source
943 # copy/rename + modify should modify target, not source
944 gitop = changed.get(bfile[2:], (None, None))[0]
944 gitop = changed.get(bfile[2:], (None, None))[0]
945 if gitop in ('COPY', 'DELETE', 'RENAME'):
945 if gitop in ('COPY', 'DELETE', 'RENAME'):
946 afile = bfile
946 afile = bfile
947 gitworkdone = True
947 gitworkdone = True
948 newfile = True
948 newfile = True
949 elif x.startswith('---'):
949 elif x.startswith('---'):
950 # check for a unified diff
950 # check for a unified diff
951 l2 = lr.readline()
951 l2 = lr.readline()
952 if not l2.startswith('+++'):
952 if not l2.startswith('+++'):
953 lr.push(l2)
953 lr.push(l2)
954 continue
954 continue
955 newfile = True
955 newfile = True
956 context = False
956 context = False
957 afile = parsefilename(x)
957 afile = parsefilename(x)
958 bfile = parsefilename(l2)
958 bfile = parsefilename(l2)
959 elif x.startswith('***'):
959 elif x.startswith('***'):
960 # check for a context diff
960 # check for a context diff
961 l2 = lr.readline()
961 l2 = lr.readline()
962 if not l2.startswith('---'):
962 if not l2.startswith('---'):
963 lr.push(l2)
963 lr.push(l2)
964 continue
964 continue
965 l3 = lr.readline()
965 l3 = lr.readline()
966 lr.push(l3)
966 lr.push(l3)
967 if not l3.startswith("***************"):
967 if not l3.startswith("***************"):
968 lr.push(l2)
968 lr.push(l2)
969 continue
969 continue
970 newfile = True
970 newfile = True
971 context = True
971 context = True
972 afile = parsefilename(x)
972 afile = parsefilename(x)
973 bfile = parsefilename(l2)
973 bfile = parsefilename(l2)
974
974
975 if newfile:
975 if newfile:
976 emitfile = True
976 emitfile = True
977 state = BFILE
977 state = BFILE
978 hunknum = 0
978 hunknum = 0
979 if current_hunk:
979 if current_hunk:
980 if current_hunk.complete():
980 if current_hunk.complete():
981 yield 'hunk', current_hunk
981 yield 'hunk', current_hunk
982 else:
982 else:
983 raise PatchError(_("malformed patch %s %s") % (afile,
983 raise PatchError(_("malformed patch %s %s") % (afile,
984 current_hunk.desc))
984 current_hunk.desc))
985
985
986 if hunknum == 0 and dopatch and not gitworkdone:
986 if hunknum == 0 and dopatch and not gitworkdone:
987 raise NoHunks
987 raise NoHunks
988
988
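iterhunks above turns a patch stream into ('file' | 'hunk' | 'git') events; applydiff below is its real consumer. A toy Python 3 consumer showing the expected protocol, with placeholder payloads rather than parsed hunk objects:

# Hypothetical event stream in the shape iterhunks() yields; the payloads here
# are placeholders, not real hunk objects.
events = [
    ('git', ['<gitpatch for b.txt>']),
    ('file', ('a/a.txt', 'b/a.txt', '<first hunk>')),
    ('hunk', '<first hunk>'),
    ('hunk', '<second hunk>'),
]

applied = {}
current = None
for state, values in events:
    if state == 'git':
        pass                            # perform copies/renames up front
    elif state == 'file':
        afile, bfile, first_hunk = values
        current = bfile                 # open the target, cf. selectfile()
        applied.setdefault(current, [])
    elif state == 'hunk':
        applied[current].append(values) # apply against the open file
    else:
        raise ValueError('unsupported parser state: %s' % state)

assert applied == {'b/a.txt': ['<first hunk>', '<second hunk>']}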
989 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
989 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
990 rejmerge=None, updatedir=None):
990 rejmerge=None, updatedir=None):
991 """reads a patch from fp and tries to apply it. The dict 'changed' is
991 """reads a patch from fp and tries to apply it. The dict 'changed' is
992 filled in with all of the filenames changed by the patch. Returns 0
992 filled in with all of the filenames changed by the patch. Returns 0
993 for a clean patch, -1 if any rejects were found and 1 if there was
993 for a clean patch, -1 if any rejects were found and 1 if there was
994 any fuzz."""
994 any fuzz."""
995
995
996 rejects = 0
996 rejects = 0
997 err = 0
997 err = 0
998 current_file = None
998 current_file = None
999 gitpatches = None
999 gitpatches = None
1000
1000
1001 def closefile():
1001 def closefile():
1002 if not current_file:
1002 if not current_file:
1003 return 0
1003 return 0
1004 current_file.close()
1004 current_file.close()
1005 if rejmerge:
1005 if rejmerge:
1006 rejmerge(current_file)
1006 rejmerge(current_file)
1007 return len(current_file.rej)
1007 return len(current_file.rej)
1008
1008
1009 for state, values in iterhunks(ui, fp, sourcefile):
1009 for state, values in iterhunks(ui, fp, sourcefile):
1010 if state == 'hunk':
1010 if state == 'hunk':
1011 if not current_file:
1011 if not current_file:
1012 continue
1012 continue
1013 current_hunk = values
1013 current_hunk = values
1014 ret = current_file.apply(current_hunk, reverse)
1014 ret = current_file.apply(current_hunk, reverse)
1015 if ret >= 0:
1015 if ret >= 0:
1016 changed.setdefault(current_file.fname, (None, None))
1016 changed.setdefault(current_file.fname, (None, None))
1017 if ret > 0:
1017 if ret > 0:
1018 err = 1
1018 err = 1
1019 elif state == 'file':
1019 elif state == 'file':
1020 rejects += closefile()
1020 rejects += closefile()
1021 afile, bfile, first_hunk = values
1021 afile, bfile, first_hunk = values
1022 try:
1022 try:
1023 if sourcefile:
1023 if sourcefile:
1024 current_file = patchfile(ui, sourcefile)
1024 current_file = patchfile(ui, sourcefile)
1025 else:
1025 else:
1026 current_file, missing = selectfile(afile, bfile, first_hunk,
1026 current_file, missing = selectfile(afile, bfile, first_hunk,
1027 strip, reverse)
1027 strip, reverse)
1028 current_file = patchfile(ui, current_file, missing)
1028 current_file = patchfile(ui, current_file, missing)
1029 except PatchError, err:
1029 except PatchError, err:
1030 ui.warn(str(err) + '\n')
1030 ui.warn(str(err) + '\n')
1031 current_file, current_hunk = None, None
1031 current_file, current_hunk = None, None
1032 rejects += 1
1032 rejects += 1
1033 continue
1033 continue
1034 elif state == 'git':
1034 elif state == 'git':
1035 gitpatches = values
1035 gitpatches = values
1036 for gp in gitpatches:
1036 for gp in gitpatches:
1037 if gp.op in ('COPY', 'RENAME'):
1037 if gp.op in ('COPY', 'RENAME'):
1038 copyfile(gp.oldpath, gp.path)
1038 copyfile(gp.oldpath, gp.path)
1039 changed[gp.path] = (gp.op, gp)
1039 changed[gp.path] = (gp.op, gp)
1040 else:
1040 else:
1041 raise util.Abort(_('unsupported parser state: %s') % state)
1041 raise util.Abort(_('unsupported parser state: %s') % state)
1042
1042
1043 rejects += closefile()
1043 rejects += closefile()
1044
1044
1045 if updatedir and gitpatches:
1045 if updatedir and gitpatches:
1046 updatedir(gitpatches)
1046 updatedir(gitpatches)
1047 if rejects:
1047 if rejects:
1048 return -1
1048 return -1
1049 return err
1049 return err
1050
1050
1051 def diffopts(ui, opts={}, untrusted=False):
1051 def diffopts(ui, opts={}, untrusted=False):
1052 def get(key, name=None):
1052 def get(key, name=None):
1053 return (opts.get(key) or
1053 return (opts.get(key) or
1054 ui.configbool('diff', name or key, None, untrusted=untrusted))
1054 ui.configbool('diff', name or key, None, untrusted=untrusted))
1055 return mdiff.diffopts(
1055 return mdiff.diffopts(
1056 text=opts.get('text'),
1056 text=opts.get('text'),
1057 git=get('git'),
1057 git=get('git'),
1058 nodates=get('nodates'),
1058 nodates=get('nodates'),
1059 showfunc=get('show_function', 'showfunc'),
1059 showfunc=get('show_function', 'showfunc'),
1060 ignorews=get('ignore_all_space', 'ignorews'),
1060 ignorews=get('ignore_all_space', 'ignorews'),
1061 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1061 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1062 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1062 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1063 context=get('unified'))
1063 context=get('unified'))
1064
1064
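diffopts merges per-command options with the [diff] section of the configuration, an explicit option winning over the config value. A small Python 3 sketch of that precedence rule, with plain dicts standing in for opts and ui.configbool:

def merged_diffopts(cli_opts, config):
    """Sketch of the precedence above: an explicit option wins, otherwise the
    [diff] config value is used (note that `or` means a false CLI value also
    falls through to the config, exactly as in the getter above)."""
    def get(key, name=None):
        return cli_opts.get(key) or config.get(name or key)
    return {'git': get('git'),
            'showfunc': get('show_function', 'showfunc'),
            'ignorews': get('ignore_all_space', 'ignorews')}

config = {'git': True, 'showfunc': False, 'ignorews': False}
cli = {'ignore_all_space': True}
assert merged_diffopts(cli, config) == {'git': True, 'showfunc': False,
                                        'ignorews': True}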
1065 def updatedir(ui, repo, patches):
1065 def updatedir(ui, repo, patches):
1066 '''Update dirstate after patch application according to metadata'''
1066 '''Update dirstate after patch application according to metadata'''
1067 if not patches:
1067 if not patches:
1068 return
1068 return
1069 copies = []
1069 copies = []
1070 removes = {}
1070 removes = {}
1071 cfiles = patches.keys()
1071 cfiles = patches.keys()
1072 cwd = repo.getcwd()
1072 cwd = repo.getcwd()
1073 if cwd:
1073 if cwd:
1074 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1074 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1075 for f in patches:
1075 for f in patches:
1076 ctype, gp = patches[f]
1076 ctype, gp = patches[f]
1077 if ctype == 'RENAME':
1077 if ctype == 'RENAME':
1078 copies.append((gp.oldpath, gp.path))
1078 copies.append((gp.oldpath, gp.path))
1079 removes[gp.oldpath] = 1
1079 removes[gp.oldpath] = 1
1080 elif ctype == 'COPY':
1080 elif ctype == 'COPY':
1081 copies.append((gp.oldpath, gp.path))
1081 copies.append((gp.oldpath, gp.path))
1082 elif ctype == 'DELETE':
1082 elif ctype == 'DELETE':
1083 removes[gp.path] = 1
1083 removes[gp.path] = 1
1084 for src, dst in copies:
1084 for src, dst in copies:
1085 repo.copy(src, dst)
1085 repo.copy(src, dst)
1086 removes = removes.keys()
1086 removes = removes.keys()
1087 if removes:
1087 if removes:
1088 removes.sort()
1088 removes.sort()
1089 repo.remove(removes, True)
1089 repo.remove(removes, True)
1090 for f in patches:
1090 for f in patches:
1091 ctype, gp = patches[f]
1091 ctype, gp = patches[f]
1092 if gp and gp.mode:
1092 if gp and gp.mode:
1093 flags = ''
1093 flags = ''
1094 if gp.mode & 0100:
1094 if gp.mode & 0100:
1095 flags = 'x'
1095 flags = 'x'
1096 elif gp.mode & 020000:
1096 elif gp.mode & 020000:
1097 flags = 'l'
1097 flags = 'l'
1098 dst = os.path.join(repo.root, gp.path)
1098 dst = os.path.join(repo.root, gp.path)
1099 # patch won't create empty files
1099 # patch won't create empty files
1100 if ctype == 'ADD' and not os.path.exists(dst):
1100 if ctype == 'ADD' and not os.path.exists(dst):
1101 repo.wwrite(gp.path, '', flags)
1101 repo.wwrite(gp.path, '', flags)
1102 else:
1102 else:
1103 util.set_flags(dst, flags)
1103 util.set_flags(dst, flags)
1104 cmdutil.addremove(repo, cfiles)
1104 cmdutil.addremove(repo, cfiles)
1105 files = patches.keys()
1105 files = patches.keys()
1106 files.extend([r for r in removes if r not in files])
1106 files.extend([r for r in removes if r not in files])
1107 files.sort()
1107 files.sort()
1108
1108
1109 return files
1109 return files
1110
1110
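updatedir above translates git file modes into Mercurial flags: the owner-execute bit (0100 octal) becomes 'x' and the symlink type bit (020000, set in git's 120000 link mode) becomes 'l'. A quick standalone check of that mapping, written with Python 3 octal literals:

import stat

def flags_from_gitmode(mode):
    """Map a git file mode to Mercurial's 'x'/'l' flag, as updatedir() does."""
    if mode & 0o100:        # owner-execute bit
        return 'x'
    if mode & 0o20000:      # symlink bit (S_IFLNK = 0o120000)
        return 'l'
    return ''

assert flags_from_gitmode(0o100755) == 'x'
assert flags_from_gitmode(0o120000) == 'l'
assert flags_from_gitmode(0o100644) == ''
assert stat.S_ISLNK(0o120000)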
1111 def b85diff(to, tn):
1111 def b85diff(to, tn):
1112 '''print base85-encoded binary diff'''
1112 '''print base85-encoded binary diff'''
1113 def gitindex(text):
1113 def gitindex(text):
1114 if not text:
1114 if not text:
1115 return '0' * 40
1115 return '0' * 40
1116 l = len(text)
1116 l = len(text)
1117 s = sha.new('blob %d\0' % l)
1117 s = sha.new('blob %d\0' % l)
1118 s.update(text)
1118 s.update(text)
1119 return s.hexdigest()
1119 return s.hexdigest()
1120
1120
1121 def fmtline(line):
1121 def fmtline(line):
1122 l = len(line)
1122 l = len(line)
1123 if l <= 26:
1123 if l <= 26:
1124 l = chr(ord('A') + l - 1)
1124 l = chr(ord('A') + l - 1)
1125 else:
1125 else:
1126 l = chr(l - 26 + ord('a') - 1)
1126 l = chr(l - 26 + ord('a') - 1)
1127 return '%c%s\n' % (l, base85.b85encode(line, True))
1127 return '%c%s\n' % (l, base85.b85encode(line, True))
1128
1128
1129 def chunk(text, csize=52):
1129 def chunk(text, csize=52):
1130 l = len(text)
1130 l = len(text)
1131 i = 0
1131 i = 0
1132 while i < l:
1132 while i < l:
1133 yield text[i:i+csize]
1133 yield text[i:i+csize]
1134 i += csize
1134 i += csize
1135
1135
1136 tohash = gitindex(to)
1136 tohash = gitindex(to)
1137 tnhash = gitindex(tn)
1137 tnhash = gitindex(tn)
1138 if tohash == tnhash:
1138 if tohash == tnhash:
1139 return ""
1139 return ""
1140
1140
1141 # TODO: deltas
1141 # TODO: deltas
1142 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1142 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1143 (tohash, tnhash, len(tn))]
1143 (tohash, tnhash, len(tn))]
1144 for l in chunk(zlib.compress(tn)):
1144 for l in chunk(zlib.compress(tn)):
1145 ret.append(fmtline(l))
1145 ret.append(fmtline(l))
1146 ret.append('\n')
1146 ret.append('\n')
1147 return ''.join(ret)
1147 return ''.join(ret)
1148
1148
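b85diff emits a git-style binary diff: an 'index <old>..<new>' line built from git blob SHA-1s, followed by the zlib+base85 literal body that binhunk.extract parses. The blob id is the SHA-1 of 'blob <len>\0' plus the data; a standalone sketch using hashlib, the modern replacement for the deprecated sha module used above:

import hashlib

def gitindex(data):
    """Return the git blob id for `data`, or the null id for missing content."""
    if not data:
        return '0' * 40
    h = hashlib.sha1(b'blob %d\0' % len(data))
    h.update(data)
    return h.hexdigest()

digest = gitindex(b'foo\n')
assert len(digest) == 40 and digest == gitindex(b'foo\n')
assert gitindex(b'') == '0' * 40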
1149 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1149 def diff(repo, node1=None, node2=None, files=None, match=util.always,
1150 fp=None, changes=None, opts=None):
1150 fp=None, changes=None, opts=None):
1151 '''print diff of changes to files between two nodes, or node and
1151 '''print diff of changes to files between two nodes, or node and
1152 working directory.
1152 working directory.
1153
1153
1154 if node1 is None, use first dirstate parent instead.
1154 if node1 is None, use first dirstate parent instead.
1155 if node2 is None, compare node1 with working directory.'''
1155 if node2 is None, compare node1 with working directory.'''
1156
1156
1157 if opts is None:
1157 if opts is None:
1158 opts = mdiff.defaultopts
1158 opts = mdiff.defaultopts
1159 if fp is None:
1159 if fp is None:
1160 fp = repo.ui
1160 fp = repo.ui
1161
1161
1162 if not node1:
1162 if not node1:
1163 node1 = repo.dirstate.parents()[0]
1163 node1 = repo.dirstate.parents()[0]
1164
1164
1165 ccache = {}
1165 ccache = {}
1166 def getctx(r):
1166 def getctx(r):
1167 if r not in ccache:
1167 if r not in ccache:
1168 ccache[r] = context.changectx(repo, r)
1168 ccache[r] = context.changectx(repo, r)
1169 return ccache[r]
1169 return ccache[r]
1170
1170
1171 flcache = {}
1171 flcache = {}
1172 def getfilectx(f, ctx):
1172 def getfilectx(f, ctx):
1173 flctx = ctx.filectx(f, filelog=flcache.get(f))
1173 flctx = ctx.filectx(f, filelog=flcache.get(f))
1174 if f not in flcache:
1174 if f not in flcache:
1175 flcache[f] = flctx._filelog
1175 flcache[f] = flctx._filelog
1176 return flctx
1176 return flctx
1177
1177
1178 # reading the data for node1 early allows it to play nicely
1178 # reading the data for node1 early allows it to play nicely
1179 # with repo.status and the revlog cache.
1179 # with repo.status and the revlog cache.
1180 ctx1 = context.changectx(repo, node1)
1180 ctx1 = context.changectx(repo, node1)
1181 # force manifest reading
1181 # force manifest reading
1182 man1 = ctx1.manifest()
1182 man1 = ctx1.manifest()
1183 date1 = util.datestr(ctx1.date())
1183 date1 = util.datestr(ctx1.date())
1184
1184
1185 if not changes:
1185 if not changes:
1186 changes = repo.status(node1, node2, files, match=match)[:5]
1186 changes = repo.status(node1, node2, files, match=match)[:5]
1187 modified, added, removed, deleted, unknown = changes
1187 modified, added, removed, deleted, unknown = changes
1188
1188
1189 if not modified and not added and not removed:
1189 if not modified and not added and not removed:
1190 return
1190 return
1191
1191
1192 if node2:
1192 if node2:
1193 ctx2 = context.changectx(repo, node2)
1193 ctx2 = context.changectx(repo, node2)
1194 execf2 = ctx2.manifest().execf
1194 execf2 = ctx2.manifest().execf
1195 linkf2 = ctx2.manifest().linkf
1195 linkf2 = ctx2.manifest().linkf
1196 else:
1196 else:
1197 ctx2 = context.workingctx(repo)
1197 ctx2 = context.workingctx(repo)
1198 execf2 = util.execfunc(repo.root, None)
1198 execf2 = util.execfunc(repo.root, None)
1199 linkf2 = util.linkfunc(repo.root, None)
1199 linkf2 = util.linkfunc(repo.root, None)
1200 if execf2 is None:
1200 if execf2 is None:
1201 mc = ctx2.parents()[0].manifest().copy()
1201 mc = ctx2.parents()[0].manifest().copy()
1202 execf2 = mc.execf
1202 execf2 = mc.execf
1203 linkf2 = mc.linkf
1203 linkf2 = mc.linkf
1204
1204
1205 # returns False if there was no rename between ctx1 and ctx2
1206 # returns None if the file was created between ctx1 and ctx2
1207 # returns the (file, node) present in ctx1 that was renamed to f in ctx2
1208 # This will only really work if c1 is the Nth 1st parent of c2.
1209 def renamed(c1, c2, man, f):
1210 startrev = c1.rev()
1211 c = c2
1212 crev = c.rev()
1213 if crev is None:
1214 crev = repo.changelog.count()
1215 orig = f
1216 files = (f,)
1217 while crev > startrev:
1218 if f in files:
1219 try:
1220 src = getfilectx(f, c).renamed()
1221 except revlog.LookupError:
1222 return None
1223 if src:
1224 f = src[0]
1225 crev = c.parents()[0].rev()
1226 # try to reuse
1227 c = getctx(crev)
1228 files = c.files()
1229 if f not in man:
1230 return None
1231 if f == orig:
1232 return False
1233 return f
1234
1235 if repo.ui.quiet:
1205 if repo.ui.quiet:
1236 r = None
1206 r = None
1237 else:
1207 else:
1238 hexfunc = repo.ui.debugflag and hex or short
1208 hexfunc = repo.ui.debugflag and hex or short
1239 r = [hexfunc(node) for node in [node1, node2] if node]
1209 r = [hexfunc(node) for node in [node1, node2] if node]
1240
1210
1241 if opts.git:
1211 if opts.git:
1242 copied = {}
1212 copy, diverge = copies.copies(repo, ctx1, ctx2, repo.changectx(nullid))
1243 c1, c2 = ctx1, ctx2
1213 for k, v in copy.items():
1244 files = added
1214 copy[v] = k
1245 man = man1
1246 if node2 and ctx1.rev() >= ctx2.rev():
1247 # renamed() starts at c2 and walks back in history until c1.
1248 # Since ctx1.rev() >= ctx2.rev(), invert ctx2 and ctx1 to
1249 # detect (inverted) copies.
1250 c1, c2 = ctx2, ctx1
1251 files = removed
1252 man = ctx2.manifest()
1253 for f in files:
1254 src = renamed(c1, c2, man, f)
1255 if src:
1256 copied[f] = src
1257 if ctx1 == c2:
1258 # invert the copied dict
1259 copied = dict([(v, k) for (k, v) in copied.iteritems()])
1260 # If we've renamed file foo to bar (copied['bar'] = 'foo'),
1261 # avoid showing a diff for foo if we're going to show
1262 # the rename to bar.
1263 srcs = [x[1] for x in copied.iteritems() if x[0] in added]
1264
1215
1265 all = modified + added + removed
1216 all = modified + added + removed
1266 all.sort()
1217 all.sort()
1267 gone = {}
1218 gone = {}
1268
1219
1269 for f in all:
1220 for f in all:
1270 to = None
1221 to = None
1271 tn = None
1222 tn = None
1272 dodiff = True
1223 dodiff = True
1273 header = []
1224 header = []
1274 if f in man1:
1225 if f in man1:
1275 to = getfilectx(f, ctx1).data()
1226 to = getfilectx(f, ctx1).data()
1276 if f not in removed:
1227 if f not in removed:
1277 tn = getfilectx(f, ctx2).data()
1228 tn = getfilectx(f, ctx2).data()
1278 a, b = f, f
1229 a, b = f, f
1279 if opts.git:
1230 if opts.git:
1280 def gitmode(x, l):
1231 def gitmode(x, l):
1281 return l and '120000' or (x and '100755' or '100644')
1232 return l and '120000' or (x and '100755' or '100644')
1282 def addmodehdr(header, omode, nmode):
1233 def addmodehdr(header, omode, nmode):
1283 if omode != nmode:
1234 if omode != nmode:
1284 header.append('old mode %s\n' % omode)
1235 header.append('old mode %s\n' % omode)
1285 header.append('new mode %s\n' % nmode)
1236 header.append('new mode %s\n' % nmode)
1286
1237
1287 if f in added:
1238 if f in added:
1288 mode = gitmode(execf2(f), linkf2(f))
1239 mode = gitmode(execf2(f), linkf2(f))
1289 if f in copied:
1240 if f in copy:
1290 a = copied[f]
1241 a = copy[f]
1291 omode = gitmode(man1.execf(a), man1.linkf(a))
1242 omode = gitmode(man1.execf(a), man1.linkf(a))
1292 addmodehdr(header, omode, mode)
1243 addmodehdr(header, omode, mode)
1293 if a in removed and a not in gone:
1244 if a in removed and a not in gone:
1294 op = 'rename'
1245 op = 'rename'
1295 gone[a] = 1
1246 gone[a] = 1
1296 else:
1247 else:
1297 op = 'copy'
1248 op = 'copy'
1298 header.append('%s from %s\n' % (op, a))
1249 header.append('%s from %s\n' % (op, a))
1299 header.append('%s to %s\n' % (op, f))
1250 header.append('%s to %s\n' % (op, f))
1300 to = getfilectx(a, ctx1).data()
1251 to = getfilectx(a, ctx1).data()
1301 else:
1252 else:
1302 header.append('new file mode %s\n' % mode)
1253 header.append('new file mode %s\n' % mode)
1303 if util.binary(tn):
1254 if util.binary(tn):
1304 dodiff = 'binary'
1255 dodiff = 'binary'
1305 elif f in removed:
1256 elif f in removed:
1306 if f in srcs:
1257 # have we already reported a copy above?
1258 if f in copy and copy[f] in added and copy[copy[f]] == f:
1307 dodiff = False
1259 dodiff = False
1308 else:
1260 else:
1309 mode = gitmode(man1.execf(f), man1.linkf(f))
1261 mode = gitmode(man1.execf(f), man1.linkf(f))
1310 header.append('deleted file mode %s\n' % mode)
1262 header.append('deleted file mode %s\n' % mode)
1311 else:
1263 else:
1312 omode = gitmode(man1.execf(f), man1.linkf(f))
1264 omode = gitmode(man1.execf(f), man1.linkf(f))
1313 nmode = gitmode(execf2(f), linkf2(f))
1265 nmode = gitmode(execf2(f), linkf2(f))
1314 addmodehdr(header, omode, nmode)
1266 addmodehdr(header, omode, nmode)
1315 if util.binary(to) or util.binary(tn):
1267 if util.binary(to) or util.binary(tn):
1316 dodiff = 'binary'
1268 dodiff = 'binary'
1317 r = None
1269 r = None
1318 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1270 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
1319 if dodiff:
1271 if dodiff:
1320 if dodiff == 'binary':
1272 if dodiff == 'binary':
1321 text = b85diff(to, tn)
1273 text = b85diff(to, tn)
1322 else:
1274 else:
1323 text = mdiff.unidiff(to, date1,
1275 text = mdiff.unidiff(to, date1,
1324 # ctx2 date may be dynamic
1276 # ctx2 date may be dynamic
1325 tn, util.datestr(ctx2.date()),
1277 tn, util.datestr(ctx2.date()),
1326 a, b, r, opts=opts)
1278 a, b, r, opts=opts)
1327 if text or len(header) > 1:
1279 if text or len(header) > 1:
1328 fp.write(''.join(header))
1280 fp.write(''.join(header))
1329 fp.write(text)
1281 fp.write(text)
1330
1282
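This function is what the changeset actually touches: instead of walking filelogs with the removed renamed() helper, the git path now asks copies.copies() for the copy map, makes it keyed by destination as well, emits 'copy from'/'rename from' headers for added files, and skips the stand-alone deletion diff when that deletion is already shown as a rename. Below is a standalone Python 3 sketch of that bookkeeping, with a plain destination-to-source dict standing in for the bidirectional copy dict built above:

def git_copy_headers(copy_by_dest, added, removed):
    """Return (headers, skipped_removals) in the spirit of the git branch above.
    copy_by_dest maps destination -> source, i.e. the shape the inversion loop
    over copy.items() produces for renamed or copied destinations."""
    headers = {}
    gone = set()
    for f in sorted(added):
        src = copy_by_dest.get(f)
        if src is None:
            headers[f] = ['new file mode 100644\n']
            continue
        op = 'rename' if src in removed and src not in gone else 'copy'
        if op == 'rename':
            gone.add(src)
        headers[f] = ['%s from %s\n' % (op, src), '%s to %s\n' % (op, f)]
    # a removal gets no diff of its own when it is already shown as the source
    # of a rename (cf. the "have we already reported a copy above?" check)
    rename_sources = {src for dst, src in copy_by_dest.items() if dst in added}
    skipped = set(removed) & rename_sources
    return headers, skipped

headers, skipped = git_copy_headers({'new.py': 'old.py'},
                                    added={'new.py'}, removed={'old.py'})
assert headers['new.py'][0] == 'rename from old.py\n'
assert skipped == {'old.py'}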
1331 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1283 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1332 opts=None):
1284 opts=None):
1333 '''export changesets as hg patches.'''
1285 '''export changesets as hg patches.'''
1334
1286
1335 total = len(revs)
1287 total = len(revs)
1336 revwidth = max([len(str(rev)) for rev in revs])
1288 revwidth = max([len(str(rev)) for rev in revs])
1337
1289
1338 def single(rev, seqno, fp):
1290 def single(rev, seqno, fp):
1339 ctx = repo.changectx(rev)
1291 ctx = repo.changectx(rev)
1340 node = ctx.node()
1292 node = ctx.node()
1341 parents = [p.node() for p in ctx.parents() if p]
1293 parents = [p.node() for p in ctx.parents() if p]
1342 branch = ctx.branch()
1294 branch = ctx.branch()
1343 if switch_parent:
1295 if switch_parent:
1344 parents.reverse()
1296 parents.reverse()
1345 prev = (parents and parents[0]) or nullid
1297 prev = (parents and parents[0]) or nullid
1346
1298
1347 if not fp:
1299 if not fp:
1348 fp = cmdutil.make_file(repo, template, node, total=total,
1300 fp = cmdutil.make_file(repo, template, node, total=total,
1349 seqno=seqno, revwidth=revwidth)
1301 seqno=seqno, revwidth=revwidth)
1350 if fp != sys.stdout and hasattr(fp, 'name'):
1302 if fp != sys.stdout and hasattr(fp, 'name'):
1351 repo.ui.note("%s\n" % fp.name)
1303 repo.ui.note("%s\n" % fp.name)
1352
1304
1353 fp.write("# HG changeset patch\n")
1305 fp.write("# HG changeset patch\n")
1354 fp.write("# User %s\n" % ctx.user())
1306 fp.write("# User %s\n" % ctx.user())
1355 fp.write("# Date %d %d\n" % ctx.date())
1307 fp.write("# Date %d %d\n" % ctx.date())
1356 if branch and (branch != 'default'):
1308 if branch and (branch != 'default'):
1357 fp.write("# Branch %s\n" % branch)
1309 fp.write("# Branch %s\n" % branch)
1358 fp.write("# Node ID %s\n" % hex(node))
1310 fp.write("# Node ID %s\n" % hex(node))
1359 fp.write("# Parent %s\n" % hex(prev))
1311 fp.write("# Parent %s\n" % hex(prev))
1360 if len(parents) > 1:
1312 if len(parents) > 1:
1361 fp.write("# Parent %s\n" % hex(parents[1]))
1313 fp.write("# Parent %s\n" % hex(parents[1]))
1362 fp.write(ctx.description().rstrip())
1314 fp.write(ctx.description().rstrip())
1363 fp.write("\n\n")
1315 fp.write("\n\n")
1364
1316
1365 diff(repo, prev, node, fp=fp, opts=opts)
1317 diff(repo, prev, node, fp=fp, opts=opts)
1366 if fp not in (sys.stdout, repo.ui):
1318 if fp not in (sys.stdout, repo.ui):
1367 fp.close()
1319 fp.close()
1368
1320
1369 for seqno, rev in enumerate(revs):
1321 for seqno, rev in enumerate(revs):
1370 single(rev, seqno+1, fp)
1322 single(rev, seqno+1, fp)
1371
1323
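export() writes one patch per requested revision; single() emits the "# HG changeset patch" header (user, date as unixtime plus timezone offset, branch when it is not 'default', node ID, one or two parents), then the description and the diff against the chosen parent. A rough sketch of just the header shape follows, with hypothetical plain-value arguments standing in for the changectx lookups used above.

def patch_header(user, date, branch, node, parents, description):
    # date is a (unixtime, tzoffset) pair, node and parents are hex strings
    lines = ["# HG changeset patch\n",
             "# User %s\n" % user,
             "# Date %d %d\n" % date]
    if branch and branch != 'default':
        lines.append("# Branch %s\n" % branch)
    lines.append("# Node ID %s\n" % node)
    lines.append("# Parent %s\n" % parents[0])
    if len(parents) > 1:
        # merge changesets record both parents
        lines.append("# Parent %s\n" % parents[1])
    lines.append(description.rstrip() + "\n\n")
    return ''.join(lines)

With switch_parent the parent list is reversed first, so a merge changeset can be exported against its second parent; a revision with no parent at all is diffed against nullid.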
1372 def diffstat(patchlines):
1324 def diffstat(patchlines):
1373 if not util.find_exe('diffstat'):
1325 if not util.find_exe('diffstat'):
1374 return
1326 return
1375 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1327 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
1376 try:
1328 try:
1377 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1329 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
1378 try:
1330 try:
1379 for line in patchlines:
1331 for line in patchlines:
1380 p.tochild.write(line + "\n")
1332 p.tochild.write(line + "\n")
1381 p.tochild.close()
1333 p.tochild.close()
1382 if p.wait(): return
1334 if p.wait(): return
1383 fp = os.fdopen(fd, 'r')
1335 fp = os.fdopen(fd, 'r')
1384 stat = []
1336 stat = []
1385 for line in fp: stat.append(line.lstrip())
1337 for line in fp: stat.append(line.lstrip())
1386 last = stat.pop()
1338 last = stat.pop()
1387 stat.insert(0, last)
1339 stat.insert(0, last)
1388 stat = ''.join(stat)
1340 stat = ''.join(stat)
1389 return stat
1341 return stat
1390 except: raise
1342 except: raise
1391 finally:
1343 finally:
1392 try: os.unlink(name)
1344 try: os.unlink(name)
1393 except: pass
1345 except: pass
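diffstat() above pipes the patch text to the external diffstat tool through popen2 and then moves the trailing summary line to the top of the report; popen2 exists only on Python 2. The sketch below approximates the same pipeline with subprocess, keeping the -p1 -w79 flags from the call above; it is an illustrative rewrite, not the code being diffed.

import subprocess

def diffstat(patchlines):
    try:
        p = subprocess.Popen(['diffstat', '-p1', '-w79'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.DEVNULL)
    except OSError:
        return None                       # diffstat is not installed
    out, _ = p.communicate(('\n'.join(patchlines) + '\n').encode())
    if p.returncode:
        return None
    stat = [line.lstrip() for line in out.decode().splitlines(True)]
    if stat:
        stat.insert(0, stat.pop())        # move the summary line to the top
    return ''.join(stat)

As in the original, a missing diffstat binary or a non-zero exit simply yields no report rather than an error.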